Fix `maybe_parse` so it no longer prevents the parser from erroring when the recursion limit is exceeded (#1464)

This commit is contained in:
tomershaniii 2024-10-21 22:41:34 +03:00 committed by GitHub
parent 38f1e573fe
commit 8e0d26abb3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 422 additions and 570 deletions

View file

@ -3208,7 +3208,7 @@ pub enum Statement {
/// Table confs /// Table confs
options: Vec<SqlOption>, options: Vec<SqlOption>,
/// Cache table as a Query /// Cache table as a Query
query: Option<Query>, query: Option<Box<Query>>,
}, },
/// ```sql /// ```sql
/// UNCACHE TABLE [ IF EXISTS ] <table_name> /// UNCACHE TABLE [ IF EXISTS ] <table_name>
@ -6883,7 +6883,7 @@ impl fmt::Display for MacroArg {
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum MacroDefinition { pub enum MacroDefinition {
Expr(Expr), Expr(Expr),
Table(Query), Table(Box<Query>),
} }
impl fmt::Display for MacroDefinition { impl fmt::Display for MacroDefinition {

View file

@ -1103,7 +1103,7 @@ pub enum PivotValueSource {
/// Pivot on all values returned by a subquery. /// Pivot on all values returned by a subquery.
/// ///
/// See <https://docs.snowflake.com/en/sql-reference/constructs/pivot#pivot-on-column-values-using-a-subquery-with-dynamic-pivot>. /// See <https://docs.snowflake.com/en/sql-reference/constructs/pivot#pivot-on-column-values-using-a-subquery-with-dynamic-pivot>.
Subquery(Query), Subquery(Box<Query>),
} }
impl fmt::Display for PivotValueSource { impl fmt::Display for PivotValueSource {

View file

@ -486,9 +486,9 @@ pub trait Dialect: Debug + Any {
fn parse_column_option( fn parse_column_option(
&self, &self,
_parser: &mut Parser, _parser: &mut Parser,
) -> Option<Result<Option<ColumnOption>, ParserError>> { ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
// return None to fall back to the default behavior // return None to fall back to the default behavior
None Ok(None)
} }
/// Decide the lexical Precedence of operators. /// Decide the lexical Precedence of operators.

View file

@ -156,7 +156,7 @@ impl Dialect for SnowflakeDialect {
fn parse_column_option( fn parse_column_option(
&self, &self,
parser: &mut Parser, parser: &mut Parser,
) -> Option<Result<Option<ColumnOption>, ParserError>> { ) -> Result<Option<Result<Option<ColumnOption>, ParserError>>, ParserError> {
parser.maybe_parse(|parser| { parser.maybe_parse(|parser| {
let with = parser.parse_keyword(Keyword::WITH); let with = parser.parse_keyword(Keyword::WITH);
@ -247,7 +247,7 @@ pub fn parse_create_table(
builder = builder.comment(parser.parse_optional_inline_comment()?); builder = builder.comment(parser.parse_optional_inline_comment()?);
} }
Keyword::AS => { Keyword::AS => {
let query = parser.parse_boxed_query()?; let query = parser.parse_query()?;
builder = builder.query(Some(query)); builder = builder.query(Some(query));
break; break;
} }

View file

@ -192,7 +192,7 @@ impl<'a> Parser<'a> {
let _ = self.parse_keyword(Keyword::WITH); let _ = self.parse_keyword(Keyword::WITH);
// option // option
let mut options = vec![]; let mut options = vec![];
while let Some(opt) = self.maybe_parse(|parser| parser.parse_pg_role_option()) { while let Some(opt) = self.maybe_parse(|parser| parser.parse_pg_role_option())? {
options.push(opt); options.push(opt);
} }
// check option // check option

View file

@ -478,7 +478,7 @@ impl<'a> Parser<'a> {
Keyword::ANALYZE => self.parse_analyze(), Keyword::ANALYZE => self.parse_analyze(),
Keyword::SELECT | Keyword::WITH | Keyword::VALUES => { Keyword::SELECT | Keyword::WITH | Keyword::VALUES => {
self.prev_token(); self.prev_token();
self.parse_boxed_query().map(Statement::Query) self.parse_query().map(Statement::Query)
} }
Keyword::TRUNCATE => self.parse_truncate(), Keyword::TRUNCATE => self.parse_truncate(),
Keyword::ATTACH => { Keyword::ATTACH => {
@ -551,7 +551,7 @@ impl<'a> Parser<'a> {
}, },
Token::LParen => { Token::LParen => {
self.prev_token(); self.prev_token();
self.parse_boxed_query().map(Statement::Query) self.parse_query().map(Statement::Query)
} }
_ => self.expected("an SQL statement", next_token), _ => self.expected("an SQL statement", next_token),
} }
@ -662,7 +662,7 @@ impl<'a> Parser<'a> {
}; };
parser.expect_keyword(Keyword::PARTITIONS)?; parser.expect_keyword(Keyword::PARTITIONS)?;
Ok(pa) Ok(pa)
}) })?
.unwrap_or_default(); .unwrap_or_default();
Ok(Statement::Msck { Ok(Statement::Msck {
repair, repair,
@ -829,7 +829,7 @@ impl<'a> Parser<'a> {
columns = self columns = self
.maybe_parse(|parser| { .maybe_parse(|parser| {
parser.parse_comma_separated(|p| p.parse_identifier(false)) parser.parse_comma_separated(|p| p.parse_identifier(false))
}) })?
.unwrap_or_default(); .unwrap_or_default();
for_columns = true for_columns = true
} }
@ -986,7 +986,7 @@ impl<'a> Parser<'a> {
value: parser.parse_literal_string()?, value: parser.parse_literal_string()?,
}), }),
} }
}); })?;
if let Some(expr) = opt_expr { if let Some(expr) = opt_expr {
return Ok(expr); return Ok(expr);
@ -1061,7 +1061,7 @@ impl<'a> Parser<'a> {
&& !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
{ {
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
Ok(Expr::Function(Function { Ok(Expr::Function(Function {
name: ObjectName(vec![w.to_ident()]), name: ObjectName(vec![w.to_ident()]),
@ -1228,7 +1228,7 @@ impl<'a> Parser<'a> {
Token::LParen => { Token::LParen => {
let expr = if let Some(expr) = self.try_parse_expr_sub_query()? { let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
expr expr
} else if let Some(lambda) = self.try_parse_lambda() { } else if let Some(lambda) = self.try_parse_lambda()? {
return Ok(lambda); return Ok(lambda);
} else { } else {
let exprs = self.parse_comma_separated(Parser::parse_expr)?; let exprs = self.parse_comma_separated(Parser::parse_expr)?;
@ -1307,12 +1307,12 @@ impl<'a> Parser<'a> {
return Ok(None); return Ok(None);
} }
Ok(Some(Expr::Subquery(self.parse_boxed_query()?))) Ok(Some(Expr::Subquery(self.parse_query()?)))
} }
fn try_parse_lambda(&mut self) -> Option<Expr> { fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
if !self.dialect.supports_lambda_functions() { if !self.dialect.supports_lambda_functions() {
return None; return Ok(None);
} }
self.maybe_parse(|p| { self.maybe_parse(|p| {
let params = p.parse_comma_separated(|p| p.parse_identifier(false))?; let params = p.parse_comma_separated(|p| p.parse_identifier(false))?;
@ -1332,7 +1332,7 @@ impl<'a> Parser<'a> {
// Snowflake permits a subquery to be passed as an argument without // Snowflake permits a subquery to be passed as an argument without
// an enclosing set of parens if it's the only argument. // an enclosing set of parens if it's the only argument.
if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() { if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
let subquery = self.parse_boxed_query()?; let subquery = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
return Ok(Expr::Function(Function { return Ok(Expr::Function(Function {
name, name,
@ -1697,7 +1697,7 @@ impl<'a> Parser<'a> {
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let exists_node = Expr::Exists { let exists_node = Expr::Exists {
negated, negated,
subquery: self.parse_boxed_query()?, subquery: self.parse_query()?,
}; };
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
Ok(exists_node) Ok(exists_node)
@ -1777,7 +1777,7 @@ impl<'a> Parser<'a> {
expr: Box::new(expr), expr: Box::new(expr),
r#in: Box::new(from), r#in: Box::new(from),
}) })
}); })?;
match position_expr { match position_expr {
Some(expr) => Ok(expr), Some(expr) => Ok(expr),
// Snowflake supports `position` as an ordinary function call // Snowflake supports `position` as an ordinary function call
@ -3032,7 +3032,7 @@ impl<'a> Parser<'a> {
self.prev_token(); self.prev_token();
Expr::InSubquery { Expr::InSubquery {
expr: Box::new(expr), expr: Box::new(expr),
subquery: self.parse_boxed_query()?, subquery: self.parse_query()?,
negated, negated,
} }
} else { } else {
@ -3513,17 +3513,19 @@ impl<'a> Parser<'a> {
} }
/// Run a parser method `f`, reverting back to the current position if unsuccessful. /// Run a parser method `f`, reverting back to the current position if unsuccessful.
#[must_use] pub fn maybe_parse<T, F>(&mut self, mut f: F) -> Result<Option<T>, ParserError>
pub fn maybe_parse<T, F>(&mut self, mut f: F) -> Option<T>
where where
F: FnMut(&mut Parser) -> Result<T, ParserError>, F: FnMut(&mut Parser) -> Result<T, ParserError>,
{ {
let index = self.index; let index = self.index;
if let Ok(t) = f(self) { match f(self) {
Some(t) Ok(t) => Ok(Some(t)),
} else { // Unwind stack if limit exceeded
self.index = index; Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
None Err(_) => {
self.index = index;
Ok(None)
}
} }
} }
@ -3759,7 +3761,7 @@ impl<'a> Parser<'a> {
} }
/// Parse 'AS' before as query,such as `WITH XXX AS SELECT XXX` oer `CACHE TABLE AS SELECT XXX` /// Parse 'AS' before as query,such as `WITH XXX AS SELECT XXX` oer `CACHE TABLE AS SELECT XXX`
pub fn parse_as_query(&mut self) -> Result<(bool, Query), ParserError> { pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
match self.peek_token().token { match self.peek_token().token {
Token::Word(word) => match word.keyword { Token::Word(word) => match word.keyword {
Keyword::AS => { Keyword::AS => {
@ -4523,7 +4525,7 @@ impl<'a> Parser<'a> {
}; };
self.expect_keyword(Keyword::AS)?; self.expect_keyword(Keyword::AS)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
// Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here. // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect) let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
@ -5102,7 +5104,7 @@ impl<'a> Parser<'a> {
self.expect_keyword(Keyword::FOR)?; self.expect_keyword(Keyword::FOR)?;
let query = Some(self.parse_boxed_query()?); let query = Some(self.parse_query()?);
Ok(Statement::Declare { Ok(Statement::Declare {
stmts: vec![Declare { stmts: vec![Declare {
@ -5196,7 +5198,7 @@ impl<'a> Parser<'a> {
match self.peek_token().token { match self.peek_token().token {
Token::Word(w) if w.keyword == Keyword::SELECT => ( Token::Word(w) if w.keyword == Keyword::SELECT => (
Some(DeclareType::Cursor), Some(DeclareType::Cursor),
Some(self.parse_boxed_query()?), Some(self.parse_query()?),
None, None,
None, None,
), ),
@ -5889,7 +5891,7 @@ impl<'a> Parser<'a> {
// Parse optional `AS ( query )` // Parse optional `AS ( query )`
let query = if self.parse_keyword(Keyword::AS) { let query = if self.parse_keyword(Keyword::AS) {
Some(self.parse_boxed_query()?) Some(self.parse_query()?)
} else { } else {
None None
}; };
@ -6109,7 +6111,7 @@ impl<'a> Parser<'a> {
} }
pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> { pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
if let Some(option) = self.dialect.parse_column_option(self) { if let Some(option) = self.dialect.parse_column_option(self)? {
return option; return option;
} }
@ -6483,7 +6485,7 @@ impl<'a> Parser<'a> {
} }
// optional index name // optional index name
let index_name = self.parse_optional_indent(); let index_name = self.parse_optional_indent()?;
let index_type = self.parse_optional_using_then_index_type()?; let index_type = self.parse_optional_using_then_index_type()?;
let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
@ -6504,7 +6506,7 @@ impl<'a> Parser<'a> {
self.expect_keyword(Keyword::KEY)?; self.expect_keyword(Keyword::KEY)?;
// optional index name // optional index name
let index_name = self.parse_optional_indent(); let index_name = self.parse_optional_indent()?;
let index_type = self.parse_optional_using_then_index_type()?; let index_type = self.parse_optional_using_then_index_type()?;
let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
@ -6566,7 +6568,7 @@ impl<'a> Parser<'a> {
let name = match self.peek_token().token { let name = match self.peek_token().token {
Token::Word(word) if word.keyword == Keyword::USING => None, Token::Word(word) if word.keyword == Keyword::USING => None,
_ => self.parse_optional_indent(), _ => self.parse_optional_indent()?,
}; };
let index_type = self.parse_optional_using_then_index_type()?; let index_type = self.parse_optional_using_then_index_type()?;
@ -6597,7 +6599,7 @@ impl<'a> Parser<'a> {
let index_type_display = self.parse_index_type_display(); let index_type_display = self.parse_index_type_display();
let opt_index_name = self.parse_optional_indent(); let opt_index_name = self.parse_optional_indent()?;
let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
@ -6679,7 +6681,7 @@ impl<'a> Parser<'a> {
/// Parse `[ident]`, mostly `ident` is name, like: /// Parse `[ident]`, mostly `ident` is name, like:
/// `window_name`, `index_name`, ... /// `window_name`, `index_name`, ...
pub fn parse_optional_indent(&mut self) -> Option<Ident> { pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
self.maybe_parse(|parser| parser.parse_identifier(false)) self.maybe_parse(|parser| parser.parse_identifier(false))
} }
@ -7278,7 +7280,7 @@ impl<'a> Parser<'a> {
let with_options = self.parse_options(Keyword::WITH)?; let with_options = self.parse_options(Keyword::WITH)?;
self.expect_keyword(Keyword::AS)?; self.expect_keyword(Keyword::AS)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
Ok(Statement::AlterView { Ok(Statement::AlterView {
name, name,
@ -7317,7 +7319,7 @@ impl<'a> Parser<'a> {
pub fn parse_copy(&mut self) -> Result<Statement, ParserError> { pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
let source; let source;
if self.consume_token(&Token::LParen) { if self.consume_token(&Token::LParen) {
source = CopySource::Query(self.parse_boxed_query()?); source = CopySource::Query(self.parse_query()?);
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
} else { } else {
let table_name = self.parse_object_name(false)?; let table_name = self.parse_object_name(false)?;
@ -7361,7 +7363,7 @@ impl<'a> Parser<'a> {
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
} }
let mut legacy_options = vec![]; let mut legacy_options = vec![];
while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option()) { while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
legacy_options.push(opt); legacy_options.push(opt);
} }
let values = if let CopyTarget::Stdin = target { let values = if let CopyTarget::Stdin = target {
@ -7453,7 +7455,7 @@ impl<'a> Parser<'a> {
Some(Keyword::CSV) => CopyLegacyOption::Csv({ Some(Keyword::CSV) => CopyLegacyOption::Csv({
let mut opts = vec![]; let mut opts = vec![];
while let Some(opt) = while let Some(opt) =
self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option()) self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
{ {
opts.push(opt); opts.push(opt);
} }
@ -8035,7 +8037,7 @@ impl<'a> Parser<'a> {
// Keyword::ARRAY syntax from above // Keyword::ARRAY syntax from above
while self.consume_token(&Token::LBracket) { while self.consume_token(&Token::LBracket) {
let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) { let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) {
self.maybe_parse(|p| p.parse_literal_uint()) self.maybe_parse(|p| p.parse_literal_uint())?
} else { } else {
None None
}; };
@ -8712,7 +8714,7 @@ impl<'a> Parser<'a> {
} }
} }
match self.maybe_parse(|parser| parser.parse_statement()) { match self.maybe_parse(|parser| parser.parse_statement())? {
Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err( Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
ParserError::ParserError("Explain must be root of the plan".to_string()), ParserError::ParserError("Explain must be root of the plan".to_string()),
), ),
@ -8751,20 +8753,11 @@ impl<'a> Parser<'a> {
} }
} }
/// Call's [`Self::parse_query`] returning a `Box`'ed result.
///
/// This function can be used to reduce the stack size required in debug
/// builds. Instead of `sizeof(Query)` only a pointer (`Box<Query>`)
/// is used.
pub fn parse_boxed_query(&mut self) -> Result<Box<Query>, ParserError> {
self.parse_query().map(Box::new)
}
/// Parse a query expression, i.e. a `SELECT` statement optionally /// Parse a query expression, i.e. a `SELECT` statement optionally
/// preceded with some `WITH` CTE declarations and optionally followed /// preceded with some `WITH` CTE declarations and optionally followed
/// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't /// by `ORDER BY`. Unlike some other parse_... methods, this one doesn't
/// expect the initial keyword to be already consumed /// expect the initial keyword to be already consumed
pub fn parse_query(&mut self) -> Result<Query, ParserError> { pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
let _guard = self.recursion_counter.try_decrease()?; let _guard = self.recursion_counter.try_decrease()?;
let with = if self.parse_keyword(Keyword::WITH) { let with = if self.parse_keyword(Keyword::WITH) {
Some(With { Some(With {
@ -8787,7 +8780,8 @@ impl<'a> Parser<'a> {
for_clause: None, for_clause: None,
settings: None, settings: None,
format_clause: None, format_clause: None,
}) }
.into())
} else if self.parse_keyword(Keyword::UPDATE) { } else if self.parse_keyword(Keyword::UPDATE) {
Ok(Query { Ok(Query {
with, with,
@ -8801,9 +8795,10 @@ impl<'a> Parser<'a> {
for_clause: None, for_clause: None,
settings: None, settings: None,
format_clause: None, format_clause: None,
}) }
.into())
} else { } else {
let body = self.parse_boxed_query_body(self.dialect.prec_unknown())?; let body = self.parse_query_body(self.dialect.prec_unknown())?;
let order_by = self.parse_optional_order_by()?; let order_by = self.parse_optional_order_by()?;
@ -8885,7 +8880,8 @@ impl<'a> Parser<'a> {
for_clause, for_clause,
settings, settings,
format_clause, format_clause,
}) }
.into())
} }
} }
@ -9022,7 +9018,7 @@ impl<'a> Parser<'a> {
} }
} }
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
let alias = TableAlias { let alias = TableAlias {
name, name,
@ -9046,7 +9042,7 @@ impl<'a> Parser<'a> {
} }
} }
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
let alias = TableAlias { name, columns }; let alias = TableAlias { name, columns };
Cte { Cte {
@ -9062,15 +9058,6 @@ impl<'a> Parser<'a> {
Ok(cte) Ok(cte)
} }
/// Call's [`Self::parse_query_body`] returning a `Box`'ed result.
///
/// This function can be used to reduce the stack size required in debug
/// builds. Instead of `sizeof(QueryBody)` only a pointer (`Box<QueryBody>`)
/// is used.
fn parse_boxed_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
self.parse_query_body(precedence).map(Box::new)
}
/// Parse a "query body", which is an expression with roughly the /// Parse a "query body", which is an expression with roughly the
/// following grammar: /// following grammar:
/// ```sql /// ```sql
@ -9079,17 +9066,14 @@ impl<'a> Parser<'a> {
/// subquery ::= query_body [ order_by_limit ] /// subquery ::= query_body [ order_by_limit ]
/// set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body /// set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
/// ``` /// ```
/// pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
/// If you need `Box<SetExpr>` then maybe there is sense to use `parse_boxed_query_body`
/// due to prevent stack overflow in debug building(to reserve less memory on stack).
pub fn parse_query_body(&mut self, precedence: u8) -> Result<SetExpr, ParserError> {
// We parse the expression using a Pratt parser, as in `parse_expr()`. // We parse the expression using a Pratt parser, as in `parse_expr()`.
// Start by parsing a restricted SELECT or a `(subquery)`: // Start by parsing a restricted SELECT or a `(subquery)`:
let expr = if self.parse_keyword(Keyword::SELECT) { let expr = if self.parse_keyword(Keyword::SELECT) {
SetExpr::Select(self.parse_select().map(Box::new)?) SetExpr::Select(self.parse_select().map(Box::new)?)
} else if self.consume_token(&Token::LParen) { } else if self.consume_token(&Token::LParen) {
// CTEs are not allowed here, but the parser currently accepts them // CTEs are not allowed here, but the parser currently accepts them
let subquery = self.parse_boxed_query()?; let subquery = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
SetExpr::Query(subquery) SetExpr::Query(subquery)
} else if self.parse_keyword(Keyword::VALUES) { } else if self.parse_keyword(Keyword::VALUES) {
@ -9114,7 +9098,7 @@ impl<'a> Parser<'a> {
&mut self, &mut self,
mut expr: SetExpr, mut expr: SetExpr,
precedence: u8, precedence: u8,
) -> Result<SetExpr, ParserError> { ) -> Result<Box<SetExpr>, ParserError> {
loop { loop {
// The query can be optionally followed by a set operator: // The query can be optionally followed by a set operator:
let op = self.parse_set_operator(&self.peek_token().token); let op = self.parse_set_operator(&self.peek_token().token);
@ -9135,11 +9119,11 @@ impl<'a> Parser<'a> {
left: Box::new(expr), left: Box::new(expr),
op: op.unwrap(), op: op.unwrap(),
set_quantifier, set_quantifier,
right: self.parse_boxed_query_body(next_precedence)?, right: self.parse_query_body(next_precedence)?,
}; };
} }
Ok(expr) Ok(expr.into())
} }
pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> { pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
@ -9466,7 +9450,7 @@ impl<'a> Parser<'a> {
if let Some(Keyword::HIVEVAR) = modifier { if let Some(Keyword::HIVEVAR) = modifier {
self.expect_token(&Token::Colon)?; self.expect_token(&Token::Colon)?;
} else if let Some(set_role_stmt) = } else if let Some(set_role_stmt) =
self.maybe_parse(|parser| parser.parse_set_role(modifier)) self.maybe_parse(|parser| parser.parse_set_role(modifier))?
{ {
return Ok(set_role_stmt); return Ok(set_role_stmt);
} }
@ -9932,7 +9916,7 @@ impl<'a> Parser<'a> {
// subquery, followed by the closing ')', and the alias of the derived table. // subquery, followed by the closing ')', and the alias of the derived table.
// In the example above this is case (3). // In the example above this is case (3).
if let Some(mut table) = if let Some(mut table) =
self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral)) self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
{ {
while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
{ {
@ -10462,7 +10446,7 @@ impl<'a> Parser<'a> {
&mut self, &mut self,
lateral: IsLateral, lateral: IsLateral,
) -> Result<TableFactor, ParserError> { ) -> Result<TableFactor, ParserError> {
let subquery = self.parse_boxed_query()?; let subquery = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?;
Ok(TableFactor::Derived { Ok(TableFactor::Derived {
@ -10836,7 +10820,7 @@ impl<'a> Parser<'a> {
} else { } else {
None None
}; };
let source = self.parse_boxed_query()?; let source = self.parse_query()?;
Ok(Statement::Directory { Ok(Statement::Directory {
local, local,
path, path,
@ -10872,7 +10856,7 @@ impl<'a> Parser<'a> {
vec![] vec![]
}; };
let source = Some(self.parse_boxed_query()?); let source = Some(self.parse_query()?);
(columns, partitioned, after_columns, source) (columns, partitioned, after_columns, source)
}; };
@ -11786,7 +11770,7 @@ impl<'a> Parser<'a> {
pub fn parse_unload(&mut self) -> Result<Statement, ParserError> { pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_boxed_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
self.expect_keyword(Keyword::TO)?; self.expect_keyword(Keyword::TO)?;
@ -12130,7 +12114,9 @@ impl<'a> Parser<'a> {
pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> { pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
let window_name = match self.peek_token().token { let window_name = match self.peek_token().token {
Token::Word(word) if word.keyword == Keyword::NoKeyword => self.parse_optional_indent(), Token::Word(word) if word.keyword == Keyword::NoKeyword => {
self.parse_optional_indent()?
}
_ => None, _ => None,
}; };
@ -12342,10 +12328,8 @@ mod tests {
#[test] #[test]
fn test_ansii_character_string_types() { fn test_ansii_character_string_types() {
// Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type> // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
options: None,
};
test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None)); test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
@ -12472,10 +12456,8 @@ mod tests {
#[test] #[test]
fn test_ansii_character_large_object_types() { fn test_ansii_character_large_object_types() {
// Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length> // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
options: None,
};
test_parse_data_type!( test_parse_data_type!(
dialect, dialect,
@ -12505,10 +12487,9 @@ mod tests {
#[test] #[test]
fn test_parse_custom_types() { fn test_parse_custom_types() {
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
options: None,
};
test_parse_data_type!( test_parse_data_type!(
dialect, dialect,
"GEOMETRY", "GEOMETRY",
@ -12537,10 +12518,8 @@ mod tests {
#[test] #[test]
fn test_ansii_exact_numeric_types() { fn test_ansii_exact_numeric_types() {
// Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type> // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
options: None,
};
test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None)); test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
@ -12588,10 +12567,8 @@ mod tests {
#[test] #[test]
fn test_ansii_date_type() { fn test_ansii_date_type() {
// Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type> // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
options: None,
};
test_parse_data_type!(dialect, "DATE", DataType::Date); test_parse_data_type!(dialect, "DATE", DataType::Date);
@ -12700,10 +12677,8 @@ mod tests {
}}; }};
} }
let dialect = TestedDialects { let dialect =
dialects: vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})], TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
options: None,
};
test_parse_table_constraint!( test_parse_table_constraint!(
dialect, dialect,
@ -12822,10 +12797,7 @@ mod tests {
#[test] #[test]
fn test_parse_multipart_identifier_positive() { fn test_parse_multipart_identifier_positive() {
let dialect = TestedDialects { let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
dialects: vec![Box::new(GenericDialect {})],
options: None,
};
// parse multipart with quotes // parse multipart with quotes
let expected = vec![ let expected = vec![

View file

@ -44,6 +44,7 @@ use pretty_assertions::assert_eq;
pub struct TestedDialects { pub struct TestedDialects {
pub dialects: Vec<Box<dyn Dialect>>, pub dialects: Vec<Box<dyn Dialect>>,
pub options: Option<ParserOptions>, pub options: Option<ParserOptions>,
pub recursion_limit: Option<usize>,
} }
impl TestedDialects { impl TestedDialects {
@ -52,16 +53,38 @@ impl TestedDialects {
Self { Self {
dialects, dialects,
options: None, options: None,
recursion_limit: None,
} }
} }
pub fn new_with_options(dialects: Vec<Box<dyn Dialect>>, options: ParserOptions) -> Self {
Self {
dialects,
options: Some(options),
recursion_limit: None,
}
}
pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
self.recursion_limit = Some(recursion_limit);
self
}
fn new_parser<'a>(&self, dialect: &'a dyn Dialect) -> Parser<'a> { fn new_parser<'a>(&self, dialect: &'a dyn Dialect) -> Parser<'a> {
let parser = Parser::new(dialect); let parser = Parser::new(dialect);
if let Some(options) = &self.options { let parser = if let Some(options) = &self.options {
parser.with_options(options.clone()) parser.with_options(options.clone())
} else { } else {
parser parser
} };
let parser = if let Some(recursion_limit) = &self.recursion_limit {
parser.with_recursion_limit(*recursion_limit)
} else {
parser
};
parser
} }
/// Run the given function for all of `self.dialects`, assert that they /// Run the given function for all of `self.dialects`, assert that they

View file

@ -40,10 +40,10 @@ fn parse_literal_string() {
r#""""triple-double\"escaped""", "#, r#""""triple-double\"escaped""", "#,
r#""""triple-double"unescaped""""#, r#""""triple-double"unescaped""""#,
); );
let dialect = TestedDialects { let dialect = TestedDialects::new_with_options(
dialects: vec![Box::new(BigQueryDialect {})], vec![Box::new(BigQueryDialect {})],
options: Some(ParserOptions::new().with_unescape(false)), ParserOptions::new().with_unescape(false),
}; );
let select = dialect.verified_only_select(sql); let select = dialect.verified_only_select(sql);
assert_eq!(10, select.projection.len()); assert_eq!(10, select.projection.len());
assert_eq!( assert_eq!(
@ -1936,17 +1936,14 @@ fn parse_big_query_declare() {
} }
fn bigquery() -> TestedDialects { fn bigquery() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(BigQueryDialect {})])
dialects: vec![Box::new(BigQueryDialect {})],
options: None,
}
} }
fn bigquery_and_generic() -> TestedDialects { fn bigquery_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(BigQueryDialect {}), Box::new(GenericDialect {})], Box::new(BigQueryDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]

View file

@ -1613,15 +1613,12 @@ fn parse_explain_table() {
} }
fn clickhouse() -> TestedDialects { fn clickhouse() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(ClickHouseDialect {})])
dialects: vec![Box::new(ClickHouseDialect {})],
options: None,
}
} }
fn clickhouse_and_generic() -> TestedDialects { fn clickhouse_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(ClickHouseDialect {}), Box::new(GenericDialect {})], Box::new(ClickHouseDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }

View file

@ -341,19 +341,16 @@ fn parse_update() {
#[test] #[test]
fn parse_update_set_from() { fn parse_update_set_from() {
let sql = "UPDATE t1 SET name = t2.name FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 WHERE t1.id = t2.id"; let sql = "UPDATE t1 SET name = t2.name FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 WHERE t1.id = t2.id";
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(RedshiftSqlDialect {}),
Box::new(RedshiftSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(SQLiteDialect {}),
Box::new(SQLiteDialect {}), ]);
],
options: None,
};
let stmt = dialects.verified_stmt(sql); let stmt = dialects.verified_stmt(sql);
assert_eq!( assert_eq!(
stmt, stmt,
@ -1051,10 +1048,7 @@ fn test_eof_after_as() {
#[test] #[test]
fn test_no_infix_error() { fn test_no_infix_error() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![Box::new(ClickHouseDialect {})]);
dialects: vec![Box::new(ClickHouseDialect {})],
options: None,
};
let res = dialects.parse_sql_statements("ASSERT-URA<<"); let res = dialects.parse_sql_statements("ASSERT-URA<<");
assert_eq!( assert_eq!(
@ -1182,23 +1176,20 @@ fn parse_null_in_select() {
#[test] #[test]
fn parse_exponent_in_select() -> Result<(), ParserError> { fn parse_exponent_in_select() -> Result<(), ParserError> {
// all except Hive, as it allows numbers to start an identifier // all except Hive, as it allows numbers to start an identifier
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(ClickHouseDialect {}),
Box::new(ClickHouseDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), Box::new(GenericDialect {}),
Box::new(GenericDialect {}), // Box::new(HiveDialect {}),
// Box::new(HiveDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(RedshiftSqlDialect {}),
Box::new(RedshiftSqlDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(SQLiteDialect {}),
Box::new(SQLiteDialect {}), ]);
],
options: None,
};
let sql = "SELECT 10e-20, 1e3, 1e+3, 1e3a, 1e, 0.5e2"; let sql = "SELECT 10e-20, 1e3, 1e+3, 1e3a, 1e, 0.5e2";
let mut select = dialects.parse_sql_statements(sql)?; let mut select = dialects.parse_sql_statements(sql)?;
@ -1271,14 +1262,12 @@ fn parse_escaped_single_quote_string_predicate_with_no_escape() {
let sql = "SELECT id, fname, lname FROM customer \ let sql = "SELECT id, fname, lname FROM customer \
WHERE salary <> 'Jim''s salary'"; WHERE salary <> 'Jim''s salary'";
let ast = TestedDialects { let ast = TestedDialects::new_with_options(
dialects: vec![Box::new(MySqlDialect {})], vec![Box::new(MySqlDialect {})],
options: Some( ParserOptions::new()
ParserOptions::new() .with_trailing_commas(true)
.with_trailing_commas(true) .with_unescape(false),
.with_unescape(false), )
),
}
.verified_only_select(sql); .verified_only_select(sql);
assert_eq!( assert_eq!(
@ -1400,10 +1389,10 @@ fn parse_mod() {
} }
fn pg_and_generic() -> TestedDialects { fn pg_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(PostgreSqlDialect {}), Box::new(GenericDialect {})], Box::new(PostgreSqlDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]
@ -1868,14 +1857,13 @@ fn parse_string_agg() {
/// selects all dialects but PostgreSQL /// selects all dialects but PostgreSQL
pub fn all_dialects_but_pg() -> TestedDialects { pub fn all_dialects_but_pg() -> TestedDialects {
TestedDialects { TestedDialects::new(
dialects: all_dialects() all_dialects()
.dialects .dialects
.into_iter() .into_iter()
.filter(|x| !x.is::<PostgreSqlDialect>()) .filter(|x| !x.is::<PostgreSqlDialect>())
.collect(), .collect(),
options: None, )
}
} }
#[test] #[test]
@ -2691,17 +2679,14 @@ fn parse_listagg() {
#[test] #[test]
fn parse_array_agg_func() { fn parse_array_agg_func() {
let supported_dialects = TestedDialects { let supported_dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(HiveDialect {}),
Box::new(HiveDialect {}), ]);
],
options: None,
};
for sql in [ for sql in [
"SELECT ARRAY_AGG(x ORDER BY x) AS a FROM T", "SELECT ARRAY_AGG(x ORDER BY x) AS a FROM T",
@ -2716,16 +2701,13 @@ fn parse_array_agg_func() {
#[test] #[test]
fn parse_agg_with_order_by() { fn parse_agg_with_order_by() {
let supported_dialects = TestedDialects { let supported_dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(HiveDialect {}),
Box::new(HiveDialect {}), ]);
],
options: None,
};
for sql in [ for sql in [
"SELECT FIRST_VALUE(x ORDER BY x) AS a FROM T", "SELECT FIRST_VALUE(x ORDER BY x) AS a FROM T",
@ -2739,17 +2721,14 @@ fn parse_agg_with_order_by() {
#[test] #[test]
fn parse_window_rank_function() { fn parse_window_rank_function() {
let supported_dialects = TestedDialects { let supported_dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(HiveDialect {}),
Box::new(HiveDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), ]);
],
options: None,
};
for sql in [ for sql in [
"SELECT column1, column2, FIRST_VALUE(column2) OVER (PARTITION BY column1 ORDER BY column2 NULLS LAST) AS column2_first FROM t1", "SELECT column1, column2, FIRST_VALUE(column2) OVER (PARTITION BY column1 ORDER BY column2 NULLS LAST) AS column2_first FROM t1",
@ -2761,10 +2740,10 @@ fn parse_window_rank_function() {
supported_dialects.verified_stmt(sql); supported_dialects.verified_stmt(sql);
} }
let supported_dialects_nulls = TestedDialects { let supported_dialects_nulls = TestedDialects::new(vec![
dialects: vec![Box::new(MsSqlDialect {}), Box::new(SnowflakeDialect {})], Box::new(MsSqlDialect {}),
options: None, Box::new(SnowflakeDialect {}),
}; ]);
for sql in [ for sql in [
"SELECT column1, column2, FIRST_VALUE(column2) IGNORE NULLS OVER (PARTITION BY column1 ORDER BY column2 NULLS LAST) AS column2_first FROM t1", "SELECT column1, column2, FIRST_VALUE(column2) IGNORE NULLS OVER (PARTITION BY column1 ORDER BY column2 NULLS LAST) AS column2_first FROM t1",
@ -3321,10 +3300,7 @@ fn parse_create_table_hive_array() {
true, true,
), ),
] { ] {
let dialects = TestedDialects { let dialects = TestedDialects::new(dialects);
dialects,
options: None,
};
let sql = format!( let sql = format!(
"CREATE TABLE IF NOT EXISTS something (name INT, val {})", "CREATE TABLE IF NOT EXISTS something (name INT, val {})",
@ -3374,14 +3350,11 @@ fn parse_create_table_hive_array() {
} }
// SnowflakeDialect using array different // SnowflakeDialect using array different
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(HiveDialect {}),
Box::new(HiveDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), ]);
],
options: None,
};
let sql = "CREATE TABLE IF NOT EXISTS something (name int, val array<int)"; let sql = "CREATE TABLE IF NOT EXISTS something (name int, val array<int)";
assert_eq!( assert_eq!(
@ -3586,10 +3559,7 @@ fn parse_create_table_as_table() {
#[test] #[test]
fn parse_create_table_on_cluster() { fn parse_create_table_on_cluster() {
let generic = TestedDialects { let generic = TestedDialects::new(vec![Box::new(GenericDialect {})]);
dialects: vec![Box::new(GenericDialect {})],
options: None,
};
// Using single-quote literal to define current cluster // Using single-quote literal to define current cluster
let sql = "CREATE TABLE t ON CLUSTER '{cluster}' (a INT, b INT)"; let sql = "CREATE TABLE t ON CLUSTER '{cluster}' (a INT, b INT)";
@ -3655,10 +3625,7 @@ fn parse_create_table_with_on_delete_on_update_2in_any_order() -> Result<(), Par
#[test] #[test]
fn parse_create_table_with_options() { fn parse_create_table_with_options() {
let generic = TestedDialects { let generic = TestedDialects::new(vec![Box::new(GenericDialect {})]);
dialects: vec![Box::new(GenericDialect {})],
options: None,
};
let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)";
match generic.verified_stmt(sql) { match generic.verified_stmt(sql) {
@ -3695,10 +3662,7 @@ fn parse_create_table_clone() {
#[test] #[test]
fn parse_create_table_trailing_comma() { fn parse_create_table_trailing_comma() {
let dialect = TestedDialects { let dialect = TestedDialects::new(vec![Box::new(DuckDbDialect {})]);
dialects: vec![Box::new(DuckDbDialect {})],
options: None,
};
let sql = "CREATE TABLE foo (bar int,);"; let sql = "CREATE TABLE foo (bar int,);";
dialect.one_statement_parses_to(sql, "CREATE TABLE foo (bar INT)"); dialect.one_statement_parses_to(sql, "CREATE TABLE foo (bar INT)");
@ -4040,15 +4004,12 @@ fn parse_alter_table_add_column() {
#[test] #[test]
fn parse_alter_table_add_column_if_not_exists() { fn parse_alter_table_add_column_if_not_exists() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), ]);
],
options: None,
};
match alter_table_op(dialects.verified_stmt("ALTER TABLE tab ADD IF NOT EXISTS foo TEXT")) { match alter_table_op(dialects.verified_stmt("ALTER TABLE tab ADD IF NOT EXISTS foo TEXT")) {
AlterTableOperation::AddColumn { if_not_exists, .. } => { AlterTableOperation::AddColumn { if_not_exists, .. } => {
@ -4191,10 +4152,7 @@ fn parse_alter_table_alter_column_type() {
_ => unreachable!(), _ => unreachable!(),
} }
let dialect = TestedDialects { let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
dialects: vec![Box::new(GenericDialect {})],
options: None,
};
let res = let res =
dialect.parse_sql_statements(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT")); dialect.parse_sql_statements(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT"));
@ -4611,15 +4569,12 @@ fn parse_window_functions() {
#[test] #[test]
fn parse_named_window_functions() { fn parse_named_window_functions() {
let supported_dialects = TestedDialects { let supported_dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), ]);
],
options: None,
};
let sql = "SELECT row_number() OVER (w ORDER BY dt DESC), \ let sql = "SELECT row_number() OVER (w ORDER BY dt DESC), \
sum(foo) OVER (win PARTITION BY a, b ORDER BY c, d \ sum(foo) OVER (win PARTITION BY a, b ORDER BY c, d \
@ -5684,10 +5639,10 @@ fn parse_unnest_in_from_clause() {
let select = dialects.verified_only_select(sql); let select = dialects.verified_only_select(sql);
assert_eq!(select.from, want); assert_eq!(select.from, want);
} }
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![Box::new(BigQueryDialect {}), Box::new(GenericDialect {})], Box::new(BigQueryDialect {}),
options: None, Box::new(GenericDialect {}),
}; ]);
// 1. both Alias and WITH OFFSET clauses. // 1. both Alias and WITH OFFSET clauses.
chk( chk(
"expr", "expr",
@ -6670,22 +6625,20 @@ fn parse_trim() {
); );
//keep Snowflake/BigQuery TRIM syntax failing //keep Snowflake/BigQuery TRIM syntax failing
let all_expected_snowflake = TestedDialects { let all_expected_snowflake = TestedDialects::new(vec![
dialects: vec![ //Box::new(GenericDialect {}),
//Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), //Box::new(SnowflakeDialect {}),
//Box::new(SnowflakeDialect {}), Box::new(HiveDialect {}),
Box::new(HiveDialect {}), Box::new(RedshiftSqlDialect {}),
Box::new(RedshiftSqlDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), //Box::new(BigQueryDialect {}),
//Box::new(BigQueryDialect {}), Box::new(SQLiteDialect {}),
Box::new(SQLiteDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), ]);
],
options: None,
};
assert_eq!( assert_eq!(
ParserError::ParserError("Expected: ), found: 'a'".to_owned()), ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
all_expected_snowflake all_expected_snowflake
@ -8582,20 +8535,17 @@ fn test_lock_nonblock() {
#[test] #[test]
fn test_placeholder() { fn test_placeholder() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), // Note: `$` is the starting word for the HiveDialect identifier
// Note: `$` is the starting word for the HiveDialect identifier // Box::new(sqlparser::dialect::HiveDialect {}),
// Box::new(sqlparser::dialect::HiveDialect {}), ]);
],
options: None,
};
let sql = "SELECT * FROM student WHERE id = $Id1"; let sql = "SELECT * FROM student WHERE id = $Id1";
let ast = dialects.verified_only_select(sql); let ast = dialects.verified_only_select(sql);
assert_eq!( assert_eq!(
@ -8621,21 +8571,18 @@ fn test_placeholder() {
}), }),
); );
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), // Note: `?` is for jsonb operators in PostgreSqlDialect
// Note: `?` is for jsonb operators in PostgreSqlDialect // Box::new(PostgreSqlDialect {}),
// Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(AnsiDialect {}),
Box::new(AnsiDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), // Note: `$` is the starting word for the HiveDialect identifier
// Note: `$` is the starting word for the HiveDialect identifier // Box::new(sqlparser::dialect::HiveDialect {}),
// Box::new(sqlparser::dialect::HiveDialect {}), ]);
],
options: None,
};
let sql = "SELECT * FROM student WHERE id = ?"; let sql = "SELECT * FROM student WHERE id = ?";
let ast = dialects.verified_only_select(sql); let ast = dialects.verified_only_select(sql);
assert_eq!( assert_eq!(
@ -9023,7 +8970,7 @@ fn parse_cache_table() {
value: Expr::Value(number("0.88")), value: Expr::Value(number("0.88")),
}, },
], ],
query: Some(query.clone()), query: Some(query.clone().into()),
} }
); );
@ -9048,7 +8995,7 @@ fn parse_cache_table() {
value: Expr::Value(number("0.88")), value: Expr::Value(number("0.88")),
}, },
], ],
query: Some(query.clone()), query: Some(query.clone().into()),
} }
); );
@ -9059,7 +9006,7 @@ fn parse_cache_table() {
table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]),
has_as: false, has_as: false,
options: vec![], options: vec![],
query: Some(query.clone()), query: Some(query.clone().into()),
} }
); );
@ -9070,7 +9017,7 @@ fn parse_cache_table() {
table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]),
has_as: true, has_as: true,
options: vec![], options: vec![],
query: Some(query), query: Some(query.into()),
} }
); );
@ -9243,14 +9190,11 @@ fn parse_with_recursion_limit() {
#[test] #[test]
fn parse_escaped_string_with_unescape() { fn parse_escaped_string_with_unescape() {
fn assert_mysql_query_value(sql: &str, quoted: &str) { fn assert_mysql_query_value(sql: &str, quoted: &str) {
let stmt = TestedDialects { let stmt = TestedDialects::new(vec![
dialects: vec![ Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), ])
],
options: None,
}
.one_statement_parses_to(sql, ""); .one_statement_parses_to(sql, "");
match stmt { match stmt {
@ -9283,14 +9227,14 @@ fn parse_escaped_string_with_unescape() {
#[test] #[test]
fn parse_escaped_string_without_unescape() { fn parse_escaped_string_without_unescape() {
fn assert_mysql_query_value(sql: &str, quoted: &str) { fn assert_mysql_query_value(sql: &str, quoted: &str) {
let stmt = TestedDialects { let stmt = TestedDialects::new_with_options(
dialects: vec![ vec![
Box::new(MySqlDialect {}), Box::new(MySqlDialect {}),
Box::new(BigQueryDialect {}), Box::new(BigQueryDialect {}),
Box::new(SnowflakeDialect {}), Box::new(SnowflakeDialect {}),
], ],
options: Some(ParserOptions::new().with_unescape(false)), ParserOptions::new().with_unescape(false),
} )
.one_statement_parses_to(sql, ""); .one_statement_parses_to(sql, "");
match stmt { match stmt {
@ -9558,17 +9502,14 @@ fn make_where_clause(num: usize) -> String {
#[test] #[test]
fn parse_non_latin_identifiers() { fn parse_non_latin_identifiers() {
let supported_dialects = TestedDialects { let supported_dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(MsSqlDialect {}),
Box::new(MsSqlDialect {}), Box::new(RedshiftSqlDialect {}),
Box::new(RedshiftSqlDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), ]);
],
options: None,
};
supported_dialects.verified_stmt("SELECT a.説明 FROM test.public.inter01 AS a"); supported_dialects.verified_stmt("SELECT a.説明 FROM test.public.inter01 AS a");
supported_dialects.verified_stmt("SELECT a.説明 FROM inter01 AS a, inter01_transactions AS b WHERE a.説明 = b.取引 GROUP BY a.説明"); supported_dialects.verified_stmt("SELECT a.説明 FROM inter01 AS a, inter01_transactions AS b WHERE a.説明 = b.取引 GROUP BY a.説明");
@ -9582,10 +9523,7 @@ fn parse_non_latin_identifiers() {
fn parse_trailing_comma() { fn parse_trailing_comma() {
// At the moment, DuckDB is the only dialect that allows // At the moment, DuckDB is the only dialect that allows
// trailing commas anywhere in the query // trailing commas anywhere in the query
let trailing_commas = TestedDialects { let trailing_commas = TestedDialects::new(vec![Box::new(DuckDbDialect {})]);
dialects: vec![Box::new(DuckDbDialect {})],
options: None,
};
trailing_commas.one_statement_parses_to( trailing_commas.one_statement_parses_to(
"SELECT album_id, name, FROM track", "SELECT album_id, name, FROM track",
@ -9624,10 +9562,7 @@ fn parse_trailing_comma() {
trailing_commas.verified_stmt(r#"SELECT "from" FROM "from""#); trailing_commas.verified_stmt(r#"SELECT "from" FROM "from""#);
// doesn't allow any trailing commas // doesn't allow any trailing commas
let trailing_commas = TestedDialects { let trailing_commas = TestedDialects::new(vec![Box::new(GenericDialect {})]);
dialects: vec![Box::new(GenericDialect {})],
options: None,
};
assert_eq!( assert_eq!(
trailing_commas trailing_commas
@ -9656,10 +9591,10 @@ fn parse_trailing_comma() {
#[test] #[test]
fn parse_projection_trailing_comma() { fn parse_projection_trailing_comma() {
// Some dialects allow trailing commas only in the projection // Some dialects allow trailing commas only in the projection
let trailing_commas = TestedDialects { let trailing_commas = TestedDialects::new(vec![
dialects: vec![Box::new(SnowflakeDialect {}), Box::new(BigQueryDialect {})], Box::new(SnowflakeDialect {}),
options: None, Box::new(BigQueryDialect {}),
}; ]);
trailing_commas.one_statement_parses_to( trailing_commas.one_statement_parses_to(
"SELECT album_id, name, FROM track", "SELECT album_id, name, FROM track",
@ -9946,14 +9881,11 @@ fn test_release_savepoint() {
#[test] #[test]
fn test_comment_hash_syntax() { fn test_comment_hash_syntax() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(MySqlDialect {}),
Box::new(MySqlDialect {}), ]);
],
options: None,
};
let sql = r#" let sql = r#"
# comment # comment
SELECT a, b, c # , d, e SELECT a, b, c # , d, e
@ -10013,10 +9945,10 @@ fn test_buffer_reuse() {
#[test] #[test]
fn parse_map_access_expr() { fn parse_map_access_expr() {
let sql = "users[-1][safe_offset(2)]"; let sql = "users[-1][safe_offset(2)]";
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![Box::new(BigQueryDialect {}), Box::new(ClickHouseDialect {})], Box::new(BigQueryDialect {}),
options: None, Box::new(ClickHouseDialect {}),
}; ]);
let expr = dialects.verified_expr(sql); let expr = dialects.verified_expr(sql);
let expected = Expr::MapAccess { let expected = Expr::MapAccess {
column: Expr::Identifier(Ident::new("users")).into(), column: Expr::Identifier(Ident::new("users")).into(),
@ -10591,16 +10523,13 @@ fn test_match_recognize_patterns() {
#[test] #[test]
fn test_select_wildcard_with_replace() { fn test_select_wildcard_with_replace() {
let sql = r#"SELECT * REPLACE (lower(city) AS city) FROM addresses"#; let sql = r#"SELECT * REPLACE (lower(city) AS city) FROM addresses"#;
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(BigQueryDialect {}),
Box::new(BigQueryDialect {}), Box::new(ClickHouseDialect {}),
Box::new(ClickHouseDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), ]);
],
options: None,
};
let select = dialects.verified_only_select(sql); let select = dialects.verified_only_select(sql);
let expected = SelectItem::Wildcard(WildcardAdditionalOptions { let expected = SelectItem::Wildcard(WildcardAdditionalOptions {
opt_replace: Some(ReplaceSelectItem { opt_replace: Some(ReplaceSelectItem {
@ -10657,14 +10586,11 @@ fn test_select_wildcard_with_replace() {
#[test] #[test]
fn parse_sized_list() { fn parse_sized_list() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(PostgreSqlDialect {}),
Box::new(PostgreSqlDialect {}), Box::new(DuckDbDialect {}),
Box::new(DuckDbDialect {}), ]);
],
options: None,
};
let sql = r#"CREATE TABLE embeddings (data FLOAT[1536])"#; let sql = r#"CREATE TABLE embeddings (data FLOAT[1536])"#;
dialects.verified_stmt(sql); dialects.verified_stmt(sql);
let sql = r#"CREATE TABLE embeddings (data FLOAT[1536][3])"#; let sql = r#"CREATE TABLE embeddings (data FLOAT[1536][3])"#;
@ -10675,14 +10601,11 @@ fn parse_sized_list() {
#[test] #[test]
fn insert_into_with_parentheses() { fn insert_into_with_parentheses() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(RedshiftSqlDialect {}),
Box::new(RedshiftSqlDialect {}), Box::new(GenericDialect {}),
Box::new(GenericDialect {}), ]);
],
options: None,
};
dialects.verified_stmt("INSERT INTO t1 (id, name) (SELECT t2.id, t2.name FROM t2)"); dialects.verified_stmt("INSERT INTO t1 (id, name) (SELECT t2.id, t2.name FROM t2)");
} }
@ -10850,14 +10773,11 @@ fn parse_within_group() {
#[test] #[test]
fn tests_select_values_without_parens() { fn tests_select_values_without_parens() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(DatabricksDialect {}),
Box::new(DatabricksDialect {}), ]);
],
options: None,
};
let sql = "SELECT * FROM VALUES (1, 2), (2,3) AS tbl (id, val)"; let sql = "SELECT * FROM VALUES (1, 2), (2,3) AS tbl (id, val)";
let canonical = "SELECT * FROM (VALUES (1, 2), (2, 3)) AS tbl (id, val)"; let canonical = "SELECT * FROM (VALUES (1, 2), (2, 3)) AS tbl (id, val)";
dialects.verified_only_select_with_canonical(sql, canonical); dialects.verified_only_select_with_canonical(sql, canonical);
@ -10865,14 +10785,12 @@ fn tests_select_values_without_parens() {
#[test] #[test]
fn tests_select_values_without_parens_and_set_op() { fn tests_select_values_without_parens_and_set_op() {
let dialects = TestedDialects { let dialects = TestedDialects::new(vec![
dialects: vec![ Box::new(GenericDialect {}),
Box::new(GenericDialect {}), Box::new(SnowflakeDialect {}),
Box::new(SnowflakeDialect {}), Box::new(DatabricksDialect {}),
Box::new(DatabricksDialect {}), ]);
],
options: None,
};
let sql = "SELECT id + 1, name FROM VALUES (1, 'Apple'), (2, 'Banana'), (3, 'Orange') AS fruits (id, name) UNION ALL SELECT 5, 'Strawberry'"; let sql = "SELECT id + 1, name FROM VALUES (1, 'Apple'), (2, 'Banana'), (3, 'Orange') AS fruits (id, name) UNION ALL SELECT 5, 'Strawberry'";
let canonical = "SELECT id + 1, name FROM (VALUES (1, 'Apple'), (2, 'Banana'), (3, 'Orange')) AS fruits (id, name) UNION ALL SELECT 5, 'Strawberry'"; let canonical = "SELECT id + 1, name FROM (VALUES (1, 'Apple'), (2, 'Banana'), (3, 'Orange')) AS fruits (id, name) UNION ALL SELECT 5, 'Strawberry'";
let query = dialects.verified_query_with_canonical(sql, canonical); let query = dialects.verified_query_with_canonical(sql, canonical);

View file

@ -24,17 +24,14 @@ use test_utils::*;
mod test_utils; mod test_utils;
fn databricks() -> TestedDialects { fn databricks() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(DatabricksDialect {})])
dialects: vec![Box::new(DatabricksDialect {})],
options: None,
}
} }
fn databricks_and_generic() -> TestedDialects { fn databricks_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(DatabricksDialect {}), Box::new(GenericDialect {})], Box::new(DatabricksDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]

View file

@ -24,17 +24,14 @@ use sqlparser::ast::*;
use sqlparser::dialect::{DuckDbDialect, GenericDialect}; use sqlparser::dialect::{DuckDbDialect, GenericDialect};
fn duckdb() -> TestedDialects { fn duckdb() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(DuckDbDialect {})])
dialects: vec![Box::new(DuckDbDialect {})],
options: None,
}
} }
fn duckdb_and_generic() -> TestedDialects { fn duckdb_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(DuckDbDialect {}), Box::new(GenericDialect {})], Box::new(DuckDbDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]
@ -242,7 +239,7 @@ fn test_create_table_macro() {
MacroArg::new("col1_value"), MacroArg::new("col1_value"),
MacroArg::new("col2_value"), MacroArg::new("col2_value"),
]), ]),
definition: MacroDefinition::Table(duckdb().verified_query(query)), definition: MacroDefinition::Table(duckdb().verified_query(query).into()),
}; };
assert_eq!(expected, macro_); assert_eq!(expected, macro_);
} }

View file

@ -418,10 +418,7 @@ fn parse_create_function() {
} }
// Test error in dialect that doesn't support parsing CREATE FUNCTION // Test error in dialect that doesn't support parsing CREATE FUNCTION
let unsupported_dialects = TestedDialects { let unsupported_dialects = TestedDialects::new(vec![Box::new(MsSqlDialect {})]);
dialects: vec![Box::new(MsSqlDialect {})],
options: None,
};
assert_eq!( assert_eq!(
unsupported_dialects.parse_sql_statements(sql).unwrap_err(), unsupported_dialects.parse_sql_statements(sql).unwrap_err(),
@ -538,15 +535,9 @@ fn parse_use() {
} }
fn hive() -> TestedDialects { fn hive() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(HiveDialect {})])
dialects: vec![Box::new(HiveDialect {})],
options: None,
}
} }
fn hive_and_generic() -> TestedDialects { fn hive_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(HiveDialect {}), Box::new(GenericDialect {})])
dialects: vec![Box::new(HiveDialect {}), Box::new(GenericDialect {})],
options: None,
}
} }

View file

@ -1030,14 +1030,8 @@ fn parse_create_table_with_identity_column() {
} }
fn ms() -> TestedDialects { fn ms() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {})])
dialects: vec![Box::new(MsSqlDialect {})],
options: None,
}
} }
fn ms_and_generic() -> TestedDialects { fn ms_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})])
dialects: vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})],
options: None,
}
} }

View file

@ -944,11 +944,7 @@ fn parse_quote_identifiers() {
fn parse_escaped_quote_identifiers_with_escape() { fn parse_escaped_quote_identifiers_with_escape() {
let sql = "SELECT `quoted `` identifier`"; let sql = "SELECT `quoted `` identifier`";
assert_eq!( assert_eq!(
TestedDialects { TestedDialects::new(vec![Box::new(MySqlDialect {})]).verified_stmt(sql),
dialects: vec![Box::new(MySqlDialect {})],
options: None,
}
.verified_stmt(sql),
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
@ -991,13 +987,13 @@ fn parse_escaped_quote_identifiers_with_escape() {
fn parse_escaped_quote_identifiers_with_no_escape() { fn parse_escaped_quote_identifiers_with_no_escape() {
let sql = "SELECT `quoted `` identifier`"; let sql = "SELECT `quoted `` identifier`";
assert_eq!( assert_eq!(
TestedDialects { TestedDialects::new_with_options(
dialects: vec![Box::new(MySqlDialect {})], vec![Box::new(MySqlDialect {})],
options: Some(ParserOptions { ParserOptions {
trailing_commas: false, trailing_commas: false,
unescape: false, unescape: false,
}), }
} )
.verified_stmt(sql), .verified_stmt(sql),
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
@ -1041,11 +1037,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
fn parse_escaped_backticks_with_escape() { fn parse_escaped_backticks_with_escape() {
let sql = "SELECT ```quoted identifier```"; let sql = "SELECT ```quoted identifier```";
assert_eq!( assert_eq!(
TestedDialects { TestedDialects::new(vec![Box::new(MySqlDialect {})]).verified_stmt(sql),
dialects: vec![Box::new(MySqlDialect {})],
options: None,
}
.verified_stmt(sql),
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
@ -1088,10 +1080,10 @@ fn parse_escaped_backticks_with_escape() {
fn parse_escaped_backticks_with_no_escape() { fn parse_escaped_backticks_with_no_escape() {
let sql = "SELECT ```quoted identifier```"; let sql = "SELECT ```quoted identifier```";
assert_eq!( assert_eq!(
TestedDialects { TestedDialects::new_with_options(
dialects: vec![Box::new(MySqlDialect {})], vec![Box::new(MySqlDialect {})],
options: Some(ParserOptions::new().with_unescape(false)), ParserOptions::new().with_unescape(false)
} )
.verified_stmt(sql), .verified_stmt(sql),
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
@ -1144,55 +1136,26 @@ fn parse_unterminated_escape() {
#[test] #[test]
fn check_roundtrip_of_escaped_string() { fn check_roundtrip_of_escaped_string() {
let options = Some(ParserOptions::new().with_unescape(false)); let options = ParserOptions::new().with_unescape(false);
TestedDialects { TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
dialects: vec![Box::new(MySqlDialect {})], .verified_stmt(r"SELECT 'I\'m fine'");
options: options.clone(), TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
} .verified_stmt(r#"SELECT 'I''m fine'"#);
.verified_stmt(r"SELECT 'I\'m fine'"); TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
TestedDialects { .verified_stmt(r"SELECT 'I\\\'m fine'");
dialects: vec![Box::new(MySqlDialect {})], TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
options: options.clone(), .verified_stmt(r"SELECT 'I\\\'m fine'");
} TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
.verified_stmt(r#"SELECT 'I''m fine'"#); .verified_stmt(r#"SELECT "I\"m fine""#);
TestedDialects { TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
dialects: vec![Box::new(MySqlDialect {})], .verified_stmt(r#"SELECT "I""m fine""#);
options: options.clone(), TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
} .verified_stmt(r#"SELECT "I\\\"m fine""#);
.verified_stmt(r"SELECT 'I\\\'m fine'"); TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
TestedDialects { .verified_stmt(r#"SELECT "I\\\"m fine""#);
dialects: vec![Box::new(MySqlDialect {})], TestedDialects::new_with_options(vec![Box::new(MySqlDialect {})], options.clone())
options: options.clone(), .verified_stmt(r#"SELECT "I'm ''fine''""#);
}
.verified_stmt(r"SELECT 'I\\\'m fine'");
TestedDialects {
dialects: vec![Box::new(MySqlDialect {})],
options: options.clone(),
}
.verified_stmt(r#"SELECT "I\"m fine""#);
TestedDialects {
dialects: vec![Box::new(MySqlDialect {})],
options: options.clone(),
}
.verified_stmt(r#"SELECT "I""m fine""#);
TestedDialects {
dialects: vec![Box::new(MySqlDialect {})],
options: options.clone(),
}
.verified_stmt(r#"SELECT "I\\\"m fine""#);
TestedDialects {
dialects: vec![Box::new(MySqlDialect {})],
options: options.clone(),
}
.verified_stmt(r#"SELECT "I\\\"m fine""#);
TestedDialects {
dialects: vec![Box::new(MySqlDialect {})],
options,
}
.verified_stmt(r#"SELECT "I'm ''fine''""#);
} }
#[test] #[test]
@ -2624,17 +2587,11 @@ fn parse_create_table_with_fulltext_definition_should_not_accept_constraint_name
} }
fn mysql() -> TestedDialects { fn mysql() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(MySqlDialect {})])
dialects: vec![Box::new(MySqlDialect {})],
options: None,
}
} }
fn mysql_and_generic() -> TestedDialects { fn mysql_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})])
dialects: vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})],
options: None,
}
} }
#[test] #[test]

View file

@ -2973,17 +2973,14 @@ fn parse_on_commit() {
} }
fn pg() -> TestedDialects { fn pg() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(PostgreSqlDialect {})])
dialects: vec![Box::new(PostgreSqlDialect {})],
options: None,
}
} }
fn pg_and_generic() -> TestedDialects { fn pg_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(PostgreSqlDialect {}), Box::new(GenericDialect {})], Box::new(PostgreSqlDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]

View file

@ -171,17 +171,14 @@ fn parse_delimited_identifiers() {
} }
fn redshift() -> TestedDialects { fn redshift() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(RedshiftSqlDialect {})])
dialects: vec![Box::new(RedshiftSqlDialect {})],
options: None,
}
} }
fn redshift_and_generic() -> TestedDialects { fn redshift_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(RedshiftSqlDialect {}), Box::new(GenericDialect {})], Box::new(RedshiftSqlDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]

View file

@ -854,10 +854,8 @@ fn parse_sf_create_or_replace_view_with_comment_missing_equal() {
#[test] #[test]
fn parse_sf_create_or_replace_with_comment_for_snowflake() { fn parse_sf_create_or_replace_with_comment_for_snowflake() {
let sql = "CREATE OR REPLACE VIEW v COMMENT = 'hello, world' AS SELECT 1"; let sql = "CREATE OR REPLACE VIEW v COMMENT = 'hello, world' AS SELECT 1";
let dialect = test_utils::TestedDialects { let dialect =
dialects: vec![Box::new(SnowflakeDialect {}) as Box<dyn Dialect>], test_utils::TestedDialects::new(vec![Box::new(SnowflakeDialect {}) as Box<dyn Dialect>]);
options: None,
};
match dialect.verified_stmt(sql) { match dialect.verified_stmt(sql) {
Statement::CreateView { Statement::CreateView {
@ -1250,24 +1248,25 @@ fn test_array_agg_func() {
} }
fn snowflake() -> TestedDialects { fn snowflake() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(SnowflakeDialect {})])
dialects: vec![Box::new(SnowflakeDialect {})], }
options: None,
} fn snowflake_with_recursion_limit(recursion_limit: usize) -> TestedDialects {
TestedDialects::new(vec![Box::new(SnowflakeDialect {})]).with_recursion_limit(recursion_limit)
} }
fn snowflake_without_unescape() -> TestedDialects { fn snowflake_without_unescape() -> TestedDialects {
TestedDialects { TestedDialects::new_with_options(
dialects: vec![Box::new(SnowflakeDialect {})], vec![Box::new(SnowflakeDialect {})],
options: Some(ParserOptions::new().with_unescape(false)), ParserOptions::new().with_unescape(false),
} )
} }
fn snowflake_and_generic() -> TestedDialects { fn snowflake_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(SnowflakeDialect {}), Box::new(GenericDialect {})], Box::new(SnowflakeDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }
#[test] #[test]
@ -2759,3 +2758,26 @@ fn parse_view_column_descriptions() {
_ => unreachable!(), _ => unreachable!(),
}; };
} }
#[test]
fn test_parentheses_overflow() {
let max_nesting_level: usize = 30;
// Verify the recursion check is not too wasteful... (num of parentheses - 2 is acceptable)
let slack = 2;
let l_parens = "(".repeat(max_nesting_level - slack);
let r_parens = ")".repeat(max_nesting_level - slack);
let sql = format!("SELECT * FROM {l_parens}a.b.c{r_parens}");
let parsed =
snowflake_with_recursion_limit(max_nesting_level).parse_sql_statements(sql.as_str());
assert_eq!(parsed.err(), None);
// Verify the recursion check triggers... (num of parentheses - 1 is acceptable)
let slack = 1;
let l_parens = "(".repeat(max_nesting_level - slack);
let r_parens = ")".repeat(max_nesting_level - slack);
let sql = format!("SELECT * FROM {l_parens}a.b.c{r_parens}");
let parsed =
snowflake_with_recursion_limit(max_nesting_level).parse_sql_statements(sql.as_str());
assert_eq!(parsed.err(), Some(ParserError::RecursionLimitExceeded));
}

View file

@ -529,14 +529,13 @@ fn parse_start_transaction_with_modifier() {
sqlite_and_generic().one_statement_parses_to("BEGIN IMMEDIATE", "BEGIN IMMEDIATE TRANSACTION"); sqlite_and_generic().one_statement_parses_to("BEGIN IMMEDIATE", "BEGIN IMMEDIATE TRANSACTION");
sqlite_and_generic().one_statement_parses_to("BEGIN EXCLUSIVE", "BEGIN EXCLUSIVE TRANSACTION"); sqlite_and_generic().one_statement_parses_to("BEGIN EXCLUSIVE", "BEGIN EXCLUSIVE TRANSACTION");
let unsupported_dialects = TestedDialects { let unsupported_dialects = TestedDialects::new(
dialects: all_dialects() all_dialects()
.dialects .dialects
.into_iter() .into_iter()
.filter(|x| !(x.is::<SQLiteDialect>() || x.is::<GenericDialect>())) .filter(|x| !(x.is::<SQLiteDialect>() || x.is::<GenericDialect>()))
.collect(), .collect(),
options: None, );
};
let res = unsupported_dialects.parse_sql_statements("BEGIN DEFERRED"); let res = unsupported_dialects.parse_sql_statements("BEGIN DEFERRED");
assert_eq!( assert_eq!(
ParserError::ParserError("Expected: end of statement, found: DEFERRED".to_string()), ParserError::ParserError("Expected: end of statement, found: DEFERRED".to_string()),
@ -571,22 +570,16 @@ fn test_dollar_identifier_as_placeholder() {
} }
fn sqlite() -> TestedDialects { fn sqlite() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![Box::new(SQLiteDialect {})])
dialects: vec![Box::new(SQLiteDialect {})],
options: None,
}
} }
fn sqlite_with_options(options: ParserOptions) -> TestedDialects { fn sqlite_with_options(options: ParserOptions) -> TestedDialects {
TestedDialects { TestedDialects::new_with_options(vec![Box::new(SQLiteDialect {})], options)
dialects: vec![Box::new(SQLiteDialect {})],
options: Some(options),
}
} }
fn sqlite_and_generic() -> TestedDialects { fn sqlite_and_generic() -> TestedDialects {
TestedDialects { TestedDialects::new(vec![
dialects: vec![Box::new(SQLiteDialect {}), Box::new(GenericDialect {})], Box::new(SQLiteDialect {}),
options: None, Box::new(GenericDialect {}),
} ])
} }