Merge branch 'main' into solontsev/pg-create-server

# Conflicts:
#	tests/sqlparser_postgres.rs
This commit is contained in:
Sergey Olontsev 2025-07-03 19:30:10 +03:00
commit d0102b24f1
No known key found for this signature in database
GPG key ID: 67DDC06CA52E8A39
19 changed files with 1235 additions and 105 deletions

View file

@ -67,8 +67,11 @@ impl fmt::Display for ReplicaIdentity {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum AlterTableOperation {
/// `ADD <table_constraint>`
AddConstraint(TableConstraint),
/// `ADD <table_constraint> [NOT VALID]`
AddConstraint {
constraint: TableConstraint,
not_valid: bool,
},
/// `ADD [COLUMN] [IF NOT EXISTS] <column_def>`
AddColumn {
/// `[COLUMN]`.
@ -344,6 +347,10 @@ pub enum AlterTableOperation {
equals: bool,
value: ValueWithSpan,
},
/// `VALIDATE CONSTRAINT <name>`
ValidateConstraint {
name: Ident,
},
}
/// An `ALTER Policy` (`Statement::AlterPolicy`) operation
@ -494,7 +501,16 @@ impl fmt::Display for AlterTableOperation {
display_separated(new_partitions, " "),
ine = if *if_not_exists { " IF NOT EXISTS" } else { "" }
),
AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"),
AlterTableOperation::AddConstraint {
not_valid,
constraint,
} => {
write!(f, "ADD {constraint}")?;
if *not_valid {
write!(f, " NOT VALID")?;
}
Ok(())
}
AlterTableOperation::AddColumn {
column_keyword,
if_not_exists,
@ -772,6 +788,9 @@ impl fmt::Display for AlterTableOperation {
AlterTableOperation::ReplicaIdentity { identity } => {
write!(f, "REPLICA IDENTITY {identity}")
}
AlterTableOperation::ValidateConstraint { name } => {
write!(f, "VALIDATE CONSTRAINT {name}")
}
}
}
}
@ -893,7 +912,10 @@ pub enum AlterColumnOperation {
data_type: DataType,
/// PostgreSQL specific
using: Option<Expr>,
/// Set to true if the statement includes the `SET DATA TYPE` keywords
had_set: bool,
},
/// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]`
///
/// Note: this is a PostgreSQL-specific operation.
@ -914,12 +936,19 @@ impl fmt::Display for AlterColumnOperation {
AlterColumnOperation::DropDefault => {
write!(f, "DROP DEFAULT")
}
AlterColumnOperation::SetDataType { data_type, using } => {
if let Some(expr) = using {
write!(f, "SET DATA TYPE {data_type} USING {expr}")
} else {
write!(f, "SET DATA TYPE {data_type}")
AlterColumnOperation::SetDataType {
data_type,
using,
had_set,
} => {
if *had_set {
write!(f, "SET DATA ")?;
}
write!(f, "TYPE {data_type}")?;
if let Some(expr) = using {
write!(f, " USING {expr}")?;
}
Ok(())
}
AlterColumnOperation::AddGenerated {
generated_as,

View file

@ -996,7 +996,7 @@ pub enum Expr {
data_type: DataType,
/// The value of the constant.
/// Hint: you can unwrap the string value using `value.into_string()`.
value: Value,
value: ValueWithSpan,
},
/// Scalar function call e.g. `LEFT(foo, 5)`
Function(Function),
@ -1124,6 +1124,8 @@ pub enum Expr {
/// [Databricks](https://docs.databricks.com/en/sql/language-manual/sql-ref-lambda-functions.html)
/// [DuckDb](https://duckdb.org/docs/sql/functions/lambda.html)
Lambda(LambdaFunction),
/// Checks membership of a value in a JSON array
MemberOf(MemberOf),
}
impl Expr {
@ -1912,6 +1914,7 @@ impl fmt::Display for Expr {
}
Expr::Prior(expr) => write!(f, "PRIOR {expr}"),
Expr::Lambda(lambda) => write!(f, "{lambda}"),
Expr::MemberOf(member_of) => write!(f, "{member_of}"),
}
}
}
@ -9900,6 +9903,27 @@ impl fmt::Display for NullInclusion {
}
}
/// Checks membership of a value in a JSON array
///
/// Syntax:
/// ```sql
/// <value> MEMBER OF(<array>)
/// ```
/// [MySQL](https://dev.mysql.com/doc/refman/8.4/en/json-search-functions.html#operator_member-of)
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct MemberOf {
    /// Left-hand operand: the value whose membership is being tested.
    pub value: Box<Expr>,
    /// Right-hand operand: the JSON array searched for `value`.
    pub array: Box<Expr>,
}
impl fmt::Display for MemberOf {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} MEMBER OF({})", self.value, self.array)
}
}
#[cfg(test)]
mod tests {
use crate::tokenizer::Location;

View file

@ -2684,6 +2684,79 @@ pub enum PipeOperator {
/// Syntax: `|> TABLESAMPLE SYSTEM (10 PERCENT)
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#tablesample_pipe_operator>
TableSample { sample: Box<TableSample> },
/// Renames columns in the input table.
///
/// Syntax: `|> RENAME old_name AS new_name, ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#rename_pipe_operator>
Rename { mappings: Vec<IdentWithAlias> },
/// Combines the input table with one or more tables using UNION.
///
/// Syntax: `|> UNION [ALL|DISTINCT] (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#union_pipe_operator>
Union {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Returns only the rows that are present in both the input table and the specified tables.
///
/// Syntax: `|> INTERSECT [DISTINCT] (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#intersect_pipe_operator>
Intersect {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Returns only the rows that are present in the input table but not in the specified tables.
///
/// Syntax: `|> EXCEPT DISTINCT (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#except_pipe_operator>
Except {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Calls a table function or procedure that returns a table.
///
/// Syntax: `|> CALL function_name(args) [AS alias]`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#call_pipe_operator>
Call {
function: Function,
alias: Option<Ident>,
},
/// Pivots data from rows to columns.
///
/// Syntax: `|> PIVOT(aggregate_function(column) FOR pivot_column IN (value1, value2, ...)) [AS alias]`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#pivot_pipe_operator>
Pivot {
aggregate_functions: Vec<ExprWithAlias>,
value_column: Vec<Ident>,
value_source: PivotValueSource,
alias: Option<Ident>,
},
/// The `UNPIVOT` pipe operator transforms columns into rows.
///
/// Syntax:
/// ```sql
/// |> UNPIVOT(value_column FOR name_column IN (column1, column2, ...)) [alias]
/// ```
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#unpivot_pipe_operator>
Unpivot {
value_column: Ident,
name_column: Ident,
unpivot_columns: Vec<Ident>,
alias: Option<Ident>,
},
/// Joins the input table with another table.
///
/// Syntax: `|> [JOIN_TYPE] JOIN <table> [alias] ON <condition>` or `|> [JOIN_TYPE] JOIN <table> [alias] USING (<columns>)`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#join_pipe_operator>
Join(Join),
}
impl fmt::Display for PipeOperator {
@ -2739,10 +2812,90 @@ impl fmt::Display for PipeOperator {
PipeOperator::TableSample { sample } => {
write!(f, "{sample}")
}
PipeOperator::Rename { mappings } => {
write!(f, "RENAME {}", display_comma_separated(mappings))
}
PipeOperator::Union {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "UNION", set_quantifier, queries),
PipeOperator::Intersect {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "INTERSECT", set_quantifier, queries),
PipeOperator::Except {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "EXCEPT", set_quantifier, queries),
PipeOperator::Call { function, alias } => {
write!(f, "CALL {function}")?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Pivot {
aggregate_functions,
value_column,
value_source,
alias,
} => {
write!(
f,
"PIVOT({} FOR {} IN ({}))",
display_comma_separated(aggregate_functions),
Expr::CompoundIdentifier(value_column.to_vec()),
value_source
)?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Unpivot {
value_column,
name_column,
unpivot_columns,
alias,
} => {
write!(
f,
"UNPIVOT({} FOR {} IN ({}))",
value_column,
name_column,
display_comma_separated(unpivot_columns)
)?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Join(join) => write!(f, "{join}"),
}
}
}
impl PipeOperator {
    /// Helper that appends the optional ` AS alias` suffix shared by several
    /// pipe operators (`CALL`, `PIVOT`, `UNPIVOT`).
    fn fmt_optional_alias(f: &mut fmt::Formatter<'_>, alias: &Option<Ident>) -> fmt::Result {
        match alias {
            Some(alias) => write!(f, " AS {alias}"),
            None => Ok(()),
        }
    }

    /// Helper that formats a set-operation pipe operator (`UNION`, `INTERSECT`,
    /// `EXCEPT`): the keyword, an optional quantifier, then the queries, each
    /// wrapped in parentheses and separated by commas.
    fn fmt_set_operation(
        f: &mut fmt::Formatter<'_>,
        operation: &str,
        set_quantifier: &SetQuantifier,
        queries: &[Query],
    ) -> fmt::Result {
        write!(f, "{operation}")?;
        // `SetQuantifier::None` renders nothing; every other variant is emitted.
        if !matches!(set_quantifier, SetQuantifier::None) {
            write!(f, " {set_quantifier}")?;
        }
        let wrapped: Vec<String> = queries.iter().map(|q| format!("({q})")).collect();
        write!(f, " {}", display_comma_separated(&wrapped))
    }
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]

View file

@ -925,6 +925,7 @@ impl Spanned for AlterColumnOperation {
AlterColumnOperation::SetDataType {
data_type: _,
using,
had_set: _,
} => using.as_ref().map_or(Span::empty(), |u| u.span()),
AlterColumnOperation::AddGenerated { .. } => Span::empty(),
}
@ -1075,7 +1076,10 @@ impl Spanned for CreateTableOptions {
impl Spanned for AlterTableOperation {
fn span(&self) -> Span {
match self {
AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(),
AlterTableOperation::AddConstraint {
constraint,
not_valid: _,
} => constraint.span(),
AlterTableOperation::AddColumn {
column_keyword: _,
if_not_exists: _,
@ -1196,6 +1200,7 @@ impl Spanned for AlterTableOperation {
AlterTableOperation::AutoIncrement { value, .. } => value.span(),
AlterTableOperation::Lock { .. } => Span::empty(),
AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
AlterTableOperation::ValidateConstraint { name } => name.span,
}
}
}
@ -1411,7 +1416,6 @@ impl Spanned for AssignmentTarget {
/// f.e. `IS NULL <expr>` reports as `<expr>::span`.
///
/// Missing spans:
/// - [Expr::TypedString] # missing span for data_type
/// - [Expr::MatchAgainst] # MySQL specific
/// - [Expr::RLike] # MySQL specific
/// - [Expr::Struct] # BigQuery specific
@ -1620,6 +1624,7 @@ impl Spanned for Expr {
Expr::OuterJoin(expr) => expr.span(),
Expr::Prior(expr) => expr.span(),
Expr::Lambda(_) => Span::empty(),
Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()),
}
}
}

View file

@ -52,6 +52,10 @@ impl Dialect for GenericDialect {
true
}
// Generic dialect follows the common (e.g. MySQL/Postgres) convention of
// left-associative join chains when parentheses are omitted.
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
fn supports_connect_by(&self) -> bool {
true
}
@ -108,6 +112,10 @@ impl Dialect for GenericDialect {
true
}
// Generic dialect opts into `FROM`-first `SELECT` syntax.
fn supports_from_first_select(&self) -> bool {
true
}
fn supports_asc_desc_in_column_definition(&self) -> bool {
true
}

View file

@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
false
}
/// Indicates whether the dialect supports left-associative join parsing
/// by default when parentheses are omitted in nested joins.
///
/// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
/// so a query like:
///
/// ```sql
/// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
/// ```
/// is interpreted as:
/// ```sql
/// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
/// ```
/// and internally represented as a **flat list** of joins.
///
/// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
/// precedence and interpret the same query as:
/// ```sql
/// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
/// ```
/// which results in a **nested join** structure in the AST.
///
/// If this method returns `false`, the parser must build nested join trees
/// even in the absence of parentheses to reflect the correct associativity.
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
/// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
fn supports_outer_join_operator(&self) -> bool {
false
@ -621,6 +649,7 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
_ => Ok(self.prec_unknown()),
},
Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)),
@ -633,6 +662,7 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)),
Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)),
Token::Period => Ok(p!(Period)),
@ -1032,6 +1062,15 @@ pub trait Dialect: Debug + Any {
fn supports_space_separated_column_options(&self) -> bool {
false
}
/// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement.
/// Example:
/// ```sql
/// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE <type> USING <expr>
/// ```
fn supports_alter_column_type_using(&self) -> bool {
false
}
}
/// This represents the operators for which precedence must be defined

View file

@ -258,4 +258,8 @@ impl Dialect for PostgreSqlDialect {
fn supports_set_names(&self) -> bool {
true
}
// Postgres supports `ALTER COLUMN ... [SET DATA] TYPE <type> USING <expr>`.
fn supports_alter_column_type_using(&self) -> bool {
true
}
}

View file

@ -80,13 +80,15 @@ impl Dialect for RedshiftSqlDialect {
}
fn is_identifier_start(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii()
}
fn is_identifier_part(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii()
}
/// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)`

View file

@ -283,6 +283,10 @@ impl Dialect for SnowflakeDialect {
true
}
// Snowflake treats unparenthesized join chains as right-associative, so the
// parser must build nested join trees instead of a flat join list.
fn supports_left_associative_joins_without_parens(&self) -> bool {
false
}
fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
// Unreserve some keywords that Snowflake accepts as identifiers
// See: https://docs.snowflake.com/en/sql-reference/reserved-keywords

View file

@ -982,6 +982,7 @@ define_keywords!(
UUID,
VACUUM,
VALID,
VALIDATE,
VALIDATION_MODE,
VALUE,
VALUES,

View file

@ -1521,7 +1521,7 @@ impl<'a> Parser<'a> {
DataType::Custom(..) => parser_err!("dummy", loc),
data_type => Ok(Expr::TypedString {
data_type,
value: parser.parse_value()?.value,
value: parser.parse_value()?,
}),
}
})?;
@ -1708,10 +1708,9 @@ impl<'a> Parser<'a> {
}
fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
let value: Value = self.parse_value()?.value;
Ok(Expr::TypedString {
data_type: DataType::GeometricType(kind),
value,
value: self.parse_value()?,
})
}
@ -2576,7 +2575,7 @@ impl<'a> Parser<'a> {
trim_characters: None,
})
} else if self.consume_token(&Token::Comma)
&& dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect)
&& dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
{
let characters = self.parse_comma_separated(Parser::parse_expr)?;
self.expect_token(&Token::RParen)?;
@ -3609,6 +3608,19 @@ impl<'a> Parser<'a> {
self.expected("IN or BETWEEN after NOT", self.peek_token())
}
}
Keyword::MEMBER => {
if self.parse_keyword(Keyword::OF) {
self.expect_token(&Token::LParen)?;
let array = self.parse_expr()?;
self.expect_token(&Token::RParen)?;
Ok(Expr::MemberOf(MemberOf {
value: Box::new(expr),
array: Box::new(array),
}))
} else {
self.expected("OF after MEMBER", self.peek_token())
}
}
// Can only happen if `get_next_precedence` got out of sync with this function
_ => parser_err!(
format!("No infix parser for token {:?}", tok.token),
@ -8479,7 +8491,11 @@ impl<'a> Parser<'a> {
pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
let operation = if self.parse_keyword(Keyword::ADD) {
if let Some(constraint) = self.parse_optional_table_constraint()? {
AlterTableOperation::AddConstraint(constraint)
let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
AlterTableOperation::AddConstraint {
constraint,
not_valid,
}
} else if dialect_of!(self is ClickHouseDialect|GenericDialect)
&& self.parse_keyword(Keyword::PROJECTION)
{
@ -8736,16 +8752,10 @@ impl<'a> Parser<'a> {
}
} else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
AlterColumnOperation::DropDefault {}
} else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE])
|| (is_postgresql && self.parse_keyword(Keyword::TYPE))
{
let data_type = self.parse_data_type()?;
let using = if is_postgresql && self.parse_keyword(Keyword::USING) {
Some(self.parse_expr()?)
} else {
None
};
AlterColumnOperation::SetDataType { data_type, using }
} else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
self.parse_set_data_type(true)?
} else if self.parse_keyword(Keyword::TYPE) {
self.parse_set_data_type(false)?
} else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
Some(GeneratedAs::Always)
@ -8894,6 +8904,9 @@ impl<'a> Parser<'a> {
};
AlterTableOperation::ReplicaIdentity { identity }
} else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
let name = self.parse_identifier()?;
AlterTableOperation::ValidateConstraint { name }
} else {
let options: Vec<SqlOption> =
self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
@ -8911,6 +8924,22 @@ impl<'a> Parser<'a> {
Ok(operation)
}
/// Parses the tail of an `ALTER COLUMN ... [SET DATA] TYPE <type> [USING <expr>]`
/// operation, after the `SET DATA TYPE` / `TYPE` keywords have been consumed.
///
/// `had_set` records whether the statement used the `SET DATA TYPE` form so the
/// AST can round-trip the original spelling. The `USING <expr>` clause is only
/// accepted when the dialect opts in via `supports_alter_column_type_using`.
fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
    let data_type = self.parse_data_type()?;
    let mut using = None;
    if self.dialect.supports_alter_column_type_using() && self.parse_keyword(Keyword::USING) {
        using = Some(self.parse_expr()?);
    }
    Ok(AlterColumnOperation::SetDataType {
        data_type,
        using,
        had_set,
    })
}
fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
match keyword {
@ -9990,6 +10019,48 @@ impl<'a> Parser<'a> {
Ok(IdentWithAlias { ident, alias })
}
/// Parse `identifier [AS] identifier` where the AS keyword is optional
fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
    let ident = self.parse_identifier()?;
    // AS is optional here; consume it if present, but the alias that follows
    // is mandatory either way.
    let _ = self.parse_keyword(Keyword::AS);
    let alias = self.parse_identifier()?;
    Ok(IdentWithAlias { ident, alias })
}
/// Parse comma-separated list of parenthesized queries for pipe operators
fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
    self.parse_comma_separated(|p| {
        // Each query must be wrapped in its own parentheses.
        p.expect_token(&Token::LParen)?;
        let inner = p.parse_query()?;
        p.expect_token(&Token::RParen)?;
        Ok(*inner)
    })
}
/// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
fn parse_distinct_required_set_quantifier(
    &mut self,
    operator_name: &str,
) -> Result<SetQuantifier, ParserError> {
    // NOTE(review): `SetOperator::Intersect` is passed only to drive quantifier
    // parsing; the same quantifier rules are applied for EXCEPT as well.
    let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
    if matches!(
        quantifier,
        SetQuantifier::Distinct | SetQuantifier::DistinctByName
    ) {
        Ok(quantifier)
    } else {
        Err(ParserError::ParserError(format!(
            "{operator_name} pipe operator requires DISTINCT modifier",
        )))
    }
}
/// Parse optional identifier alias (with or without AS keyword)
fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
    if self.parse_keyword(Keyword::AS) {
        // Explicit `AS` makes the alias identifier mandatory.
        return Ok(Some(self.parse_identifier()?));
    }
    // No AS keyword: a bare identifier, if one follows, is an implicit alias.
    self.maybe_parse(|p| p.parse_identifier())
}
/// Optionally parses an alias for a select list item
fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
@ -11136,6 +11207,19 @@ impl<'a> Parser<'a> {
Keyword::AGGREGATE,
Keyword::ORDER,
Keyword::TABLESAMPLE,
Keyword::RENAME,
Keyword::UNION,
Keyword::INTERSECT,
Keyword::EXCEPT,
Keyword::CALL,
Keyword::PIVOT,
Keyword::UNPIVOT,
Keyword::JOIN,
Keyword::INNER,
Keyword::LEFT,
Keyword::RIGHT,
Keyword::FULL,
Keyword::CROSS,
])?;
match kw {
Keyword::SELECT => {
@ -11202,6 +11286,121 @@ impl<'a> Parser<'a> {
let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
pipe_operators.push(PipeOperator::TableSample { sample });
}
Keyword::RENAME => {
let mappings =
self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
pipe_operators.push(PipeOperator::Rename { mappings });
}
Keyword::UNION => {
let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
let queries = self.parse_pipe_operator_queries()?;
pipe_operators.push(PipeOperator::Union {
set_quantifier,
queries,
});
}
Keyword::INTERSECT => {
let set_quantifier =
self.parse_distinct_required_set_quantifier("INTERSECT")?;
let queries = self.parse_pipe_operator_queries()?;
pipe_operators.push(PipeOperator::Intersect {
set_quantifier,
queries,
});
}
Keyword::EXCEPT => {
let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
let queries = self.parse_pipe_operator_queries()?;
pipe_operators.push(PipeOperator::Except {
set_quantifier,
queries,
});
}
Keyword::CALL => {
let function_name = self.parse_object_name(false)?;
let function_expr = self.parse_function(function_name)?;
if let Expr::Function(function) = function_expr {
let alias = self.parse_identifier_optional_alias()?;
pipe_operators.push(PipeOperator::Call { function, alias });
} else {
return Err(ParserError::ParserError(
"Expected function call after CALL".to_string(),
));
}
}
Keyword::PIVOT => {
self.expect_token(&Token::LParen)?;
let aggregate_functions =
self.parse_comma_separated(Self::parse_aliased_function_call)?;
self.expect_keyword_is(Keyword::FOR)?;
let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
self.expect_keyword_is(Keyword::IN)?;
self.expect_token(&Token::LParen)?;
let value_source = if self.parse_keyword(Keyword::ANY) {
let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
self.parse_comma_separated(Parser::parse_order_by_expr)?
} else {
vec![]
};
PivotValueSource::Any(order_by)
} else if self.peek_sub_query() {
PivotValueSource::Subquery(self.parse_query()?)
} else {
PivotValueSource::List(
self.parse_comma_separated(Self::parse_expr_with_alias)?,
)
};
self.expect_token(&Token::RParen)?;
self.expect_token(&Token::RParen)?;
let alias = self.parse_identifier_optional_alias()?;
pipe_operators.push(PipeOperator::Pivot {
aggregate_functions,
value_column,
value_source,
alias,
});
}
Keyword::UNPIVOT => {
self.expect_token(&Token::LParen)?;
let value_column = self.parse_identifier()?;
self.expect_keyword(Keyword::FOR)?;
let name_column = self.parse_identifier()?;
self.expect_keyword(Keyword::IN)?;
self.expect_token(&Token::LParen)?;
let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
self.expect_token(&Token::RParen)?;
self.expect_token(&Token::RParen)?;
let alias = self.parse_identifier_optional_alias()?;
pipe_operators.push(PipeOperator::Unpivot {
value_column,
name_column,
unpivot_columns,
alias,
});
}
Keyword::JOIN
| Keyword::INNER
| Keyword::LEFT
| Keyword::RIGHT
| Keyword::FULL
| Keyword::CROSS => {
self.prev_token();
let mut joins = self.parse_joins()?;
if joins.len() != 1 {
return Err(ParserError::ParserError(
"Join pipe operator must have a single join".to_string(),
));
}
let join = joins.swap_remove(0);
pipe_operators.push(PipeOperator::Join(join))
}
unhandled => {
return Err(ParserError::ParserError(format!(
"`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
@ -12497,7 +12696,11 @@ impl<'a> Parser<'a> {
};
let mut relation = self.parse_table_factor()?;
if self.peek_parens_less_nested_join() {
if !self
.dialect
.supports_left_associative_joins_without_parens()
&& self.peek_parens_less_nested_join()
{
let joins = self.parse_joins()?;
relation = TableFactor::NestedJoin {
table_with_joins: Box::new(TableWithJoins { relation, joins }),

View file

@ -479,20 +479,25 @@ pub fn index_column(stmt: Statement) -> Expr {
}
}
Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
AlterTableOperation::AddConstraint(TableConstraint::Index { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
AlterTableOperation::AddConstraint { constraint, .. } => {
match constraint {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial {
columns,
..
} => columns.first().unwrap().column.expr.clone(),
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
AlterTableOperation::AddConstraint(TableConstraint::Unique { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
AlterTableOperation::AddConstraint(TableConstraint::PrimaryKey { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
AlterTableOperation::AddConstraint(TableConstraint::FulltextOrSpatial {
columns,
..
}) => columns.first().unwrap().column.expr.clone(),
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
_ => panic!("Expected a constraint"),
},
_ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
}

View file

@ -906,7 +906,10 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
@ -965,9 +968,12 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -998,7 +1004,12 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
@ -1013,7 +1024,10 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1031,7 +1045,10 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1045,7 +1062,10 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1219,7 +1239,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1278,9 +1301,12 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1311,7 +1337,12 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1326,7 +1357,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1344,7 +1378,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -1358,7 +1395,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
@ -2393,7 +2433,10 @@ fn test_triple_quote_typed_strings() {
assert_eq!(
Expr::TypedString {
data_type: DataType::JSON,
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into())
value: ValueWithSpan {
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()),
span: Span::empty(),
}
},
expr
);

View file

@ -4956,7 +4956,7 @@ fn parse_alter_table_constraints() {
match alter_table_op(verified_stmt(&format!(
"ALTER TABLE tab ADD {constraint_text}"
))) {
AlterTableOperation::AddConstraint(constraint) => {
AlterTableOperation::AddConstraint { constraint, .. } => {
assert_eq!(constraint_text, constraint.to_string());
}
_ => unreachable!(),
@ -5057,22 +5057,21 @@ fn parse_alter_table_alter_column_type() {
AlterColumnOperation::SetDataType {
data_type: DataType::Text,
using: None,
had_set: true,
}
);
}
_ => unreachable!(),
}
verified_stmt(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT"));
let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);
let dialects = all_dialects_where(|d| d.supports_alter_column_type_using());
dialects.verified_stmt(&format!(
"{alter_stmt} ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'"
));
let res =
dialect.parse_sql_statements(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT"));
assert_eq!(
ParserError::ParserError("Expected: SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN, found: TYPE".to_string()),
res.unwrap_err()
);
let res = dialect.parse_sql_statements(&format!(
let dialects = all_dialects_except(|d| d.supports_alter_column_type_using());
let res = dialects.parse_sql_statements(&format!(
"{alter_stmt} ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'"
));
assert_eq!(
@ -5852,7 +5851,10 @@ fn parse_literal_date() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::Date,
value: Value::SingleQuotedString("1999-01-01".into()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01".into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -5865,7 +5867,10 @@ fn parse_literal_time() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("01:23:34".into()),
value: ValueWithSpan {
value: Value::SingleQuotedString("01:23:34".into()),
span: Span::empty(),
},
},
expr_from_projection(only(&select.projection)),
);
@ -5878,7 +5883,10 @@ fn parse_literal_datetime() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
},
},
expr_from_projection(only(&select.projection)),
);
@ -5891,7 +5899,10 @@ fn parse_literal_timestamp_without_time_zone() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("1999-01-01 01:23:34".into()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34".into()),
span: Span::empty(),
},
},
expr_from_projection(only(&select.projection)),
);
@ -5906,7 +5917,10 @@ fn parse_literal_timestamp_with_time_zone() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::Tz),
value: Value::SingleQuotedString("1999-01-01 01:23:34Z".into()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34Z".into()),
span: Span::empty(),
},
},
expr_from_projection(only(&select.projection)),
);
@ -6478,8 +6492,9 @@ fn parse_json_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(
r#"{
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{
"id": 10,
"type": "fruit",
"name": "apple",
@ -6499,8 +6514,10 @@ fn parse_json_keyword() {
]
}
}"#
.to_string()
)
.to_string()
),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6512,7 +6529,10 @@ fn parse_typed_strings() {
assert_eq!(
Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(r#"{"foo":"bar"}"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"{"foo":"bar"}"#.into()),
span: Span::empty(),
}
},
expr
);
@ -6530,7 +6550,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"0"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"0"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6541,7 +6564,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"123456"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"123456"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6552,7 +6578,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"-3.14"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"-3.14"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6563,7 +6592,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"-0.54321"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"-0.54321"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6574,7 +6606,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"1.23456e05"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"1.23456e05"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -6585,7 +6620,10 @@ fn parse_bignumeric_keyword() {
assert_eq!(
&Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString(r#"-9.876e-3"#.into())
value: ValueWithSpan {
value: Value::SingleQuotedString(r#"-9.876e-3"#.into()),
span: Span::empty(),
}
},
expr_from_projection(only(&select.projection)),
);
@ -7762,7 +7800,6 @@ fn parse_trim() {
Box::new(MySqlDialect {}),
//Box::new(BigQueryDialect {}),
Box::new(SQLiteDialect {}),
Box::new(DuckDbDialect {}),
]);
assert_eq!(
@ -11068,10 +11105,17 @@ fn parse_non_latin_identifiers() {
Box::new(RedshiftSqlDialect {}),
Box::new(MySqlDialect {}),
]);
supported_dialects.verified_stmt("SELECT a.説明 FROM test.public.inter01 AS a");
supported_dialects.verified_stmt("SELECT a.説明 FROM inter01 AS a, inter01_transactions AS b WHERE a.説明 = b.取引 GROUP BY a.説明");
supported_dialects.verified_stmt("SELECT 説明, hühnervögel, garçon, Москва, 東京 FROM inter01");
let supported_dialects = TestedDialects::new(vec![
Box::new(GenericDialect {}),
Box::new(DuckDbDialect {}),
Box::new(PostgreSqlDialect {}),
Box::new(MsSqlDialect {}),
Box::new(MySqlDialect {}),
]);
assert!(supported_dialects
.parse_sql_statements("SELECT 💝 FROM table1")
.is_err());
@ -14828,7 +14872,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::Point),
value: Value::SingleQuotedString("1,2".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2".to_string()),
span: Span::empty(),
},
}
);
@ -14837,7 +14884,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::Line),
value: Value::SingleQuotedString("1,2,3,4".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3,4".to_string()),
span: Span::empty(),
},
}
);
@ -14846,7 +14896,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::GeometricPath),
value: Value::SingleQuotedString("1,2,3,4".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3,4".to_string()),
span: Span::empty(),
},
}
);
let sql = "box '1,2,3,4'";
@ -14854,7 +14907,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::GeometricBox),
value: Value::SingleQuotedString("1,2,3,4".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3,4".to_string()),
span: Span::empty(),
},
}
);
@ -14863,7 +14919,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::Circle),
value: Value::SingleQuotedString("1,2,3".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3".to_string()),
span: Span::empty(),
},
}
);
@ -14872,7 +14931,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::Polygon),
value: Value::SingleQuotedString("1,2,3,4".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3,4".to_string()),
span: Span::empty(),
},
}
);
let sql = "lseg '1,2,3,4'";
@ -14880,7 +14942,10 @@ fn test_geometry_type() {
all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
Expr::TypedString {
data_type: DataType::GeometricType(GeometricTypeKind::LineSegment),
value: Value::SingleQuotedString("1,2,3,4".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("1,2,3,4".to_string()),
span: Span::empty(),
},
}
);
}
@ -15217,10 +15282,426 @@ fn parse_pipeline_operator() {
dialects.verified_stmt("SELECT * FROM tbl |> TABLESAMPLE SYSTEM (50 PERCENT)");
dialects.verified_stmt("SELECT * FROM tbl |> TABLESAMPLE SYSTEM (50) REPEATABLE (10)");
// rename pipe operator
dialects.verified_stmt("SELECT * FROM users |> RENAME old_name AS new_name");
dialects.verified_stmt("SELECT * FROM users |> RENAME id AS user_id, name AS user_name");
dialects.verified_query_with_canonical(
"SELECT * FROM users |> RENAME id user_id",
"SELECT * FROM users |> RENAME id AS user_id",
);
// union pipe operator
dialects.verified_stmt("SELECT * FROM users |> UNION ALL (SELECT * FROM admins)");
dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT (SELECT * FROM admins)");
dialects.verified_stmt("SELECT * FROM users |> UNION (SELECT * FROM admins)");
// union pipe operator with multiple queries
dialects.verified_stmt(
"SELECT * FROM users |> UNION ALL (SELECT * FROM admins), (SELECT * FROM guests)",
);
dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT (SELECT * FROM admins), (SELECT * FROM guests), (SELECT * FROM employees)");
dialects.verified_stmt(
"SELECT * FROM users |> UNION (SELECT * FROM admins), (SELECT * FROM guests)",
);
// union pipe operator with BY NAME modifier
dialects.verified_stmt("SELECT * FROM users |> UNION BY NAME (SELECT * FROM admins)");
dialects.verified_stmt("SELECT * FROM users |> UNION ALL BY NAME (SELECT * FROM admins)");
dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT BY NAME (SELECT * FROM admins)");
// union pipe operator with BY NAME and multiple queries
dialects.verified_stmt(
"SELECT * FROM users |> UNION BY NAME (SELECT * FROM admins), (SELECT * FROM guests)",
);
// intersect pipe operator (BigQuery requires DISTINCT modifier for INTERSECT)
dialects.verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT (SELECT * FROM admins)");
// intersect pipe operator with BY NAME modifier
dialects
.verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT BY NAME (SELECT * FROM admins)");
// intersect pipe operator with multiple queries
dialects.verified_stmt(
"SELECT * FROM users |> INTERSECT DISTINCT (SELECT * FROM admins), (SELECT * FROM guests)",
);
// intersect pipe operator with BY NAME and multiple queries
dialects.verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT BY NAME (SELECT * FROM admins), (SELECT * FROM guests)");
// except pipe operator (BigQuery requires DISTINCT modifier for EXCEPT)
dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT (SELECT * FROM admins)");
// except pipe operator with BY NAME modifier
dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT BY NAME (SELECT * FROM admins)");
// except pipe operator with multiple queries
dialects.verified_stmt(
"SELECT * FROM users |> EXCEPT DISTINCT (SELECT * FROM admins), (SELECT * FROM guests)",
);
// except pipe operator with BY NAME and multiple queries
dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT BY NAME (SELECT * FROM admins), (SELECT * FROM guests)");
// call pipe operator
dialects.verified_stmt("SELECT * FROM users |> CALL my_function()");
dialects.verified_stmt("SELECT * FROM users |> CALL process_data(5, 'test')");
dialects.verified_stmt(
"SELECT * FROM users |> CALL namespace.function_name(col1, col2, 'literal')",
);
// call pipe operator with complex arguments
dialects.verified_stmt("SELECT * FROM users |> CALL transform_data(col1 + col2)");
dialects.verified_stmt("SELECT * FROM users |> CALL analyze_data('param1', 100, true)");
// call pipe operator with aliases
dialects.verified_stmt("SELECT * FROM input_table |> CALL tvf1(arg1) AS al");
dialects.verified_stmt("SELECT * FROM users |> CALL process_data(5) AS result_table");
dialects.verified_stmt("SELECT * FROM users |> CALL namespace.func() AS my_alias");
// multiple call pipe operators in sequence
dialects.verified_stmt("SELECT * FROM input_table |> CALL tvf1(arg1) |> CALL tvf2(arg2, arg3)");
dialects.verified_stmt(
"SELECT * FROM data |> CALL transform(col1) |> CALL validate() |> CALL process(param)",
);
// multiple call pipe operators with aliases
dialects.verified_stmt(
"SELECT * FROM input_table |> CALL tvf1(arg1) AS step1 |> CALL tvf2(arg2) AS step2",
);
dialects.verified_stmt(
"SELECT * FROM data |> CALL preprocess() AS clean_data |> CALL analyze(mode) AS results",
);
// call pipe operators mixed with other pipe operators
dialects.verified_stmt(
"SELECT * FROM users |> CALL transform() |> WHERE status = 'active' |> CALL process(param)",
);
dialects.verified_stmt(
"SELECT * FROM data |> CALL preprocess() AS clean |> SELECT col1, col2 |> CALL validate()",
);
// pivot pipe operator
dialects.verified_stmt(
"SELECT * FROM monthly_sales |> PIVOT(SUM(amount) FOR quarter IN ('Q1', 'Q2', 'Q3', 'Q4'))",
);
dialects.verified_stmt("SELECT * FROM sales_data |> PIVOT(AVG(revenue) FOR region IN ('North', 'South', 'East', 'West'))");
// pivot pipe operator with multiple aggregate functions
dialects.verified_stmt("SELECT * FROM data |> PIVOT(SUM(sales) AS total_sales, COUNT(*) AS num_transactions FOR month IN ('Jan', 'Feb', 'Mar'))");
// pivot pipe operator with compound column names
dialects.verified_stmt("SELECT * FROM sales |> PIVOT(SUM(amount) FOR product.category IN ('Electronics', 'Clothing'))");
// pivot pipe operator mixed with other pipe operators
dialects.verified_stmt("SELECT * FROM sales_data |> WHERE year = 2023 |> PIVOT(SUM(revenue) FOR quarter IN ('Q1', 'Q2', 'Q3', 'Q4'))");
// pivot pipe operator with aliases
dialects.verified_stmt("SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) AS quarterly_sales");
dialects.verified_stmt("SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) AS avg_by_category");
dialects.verified_stmt("SELECT * FROM sales |> PIVOT(COUNT(*) AS transactions, SUM(amount) AS total FOR region IN ('North', 'South')) AS regional_summary");
// pivot pipe operator with implicit aliases (without AS keyword)
dialects.verified_query_with_canonical(
"SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) quarterly_sales",
"SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) AS quarterly_sales",
);
dialects.verified_query_with_canonical(
"SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) avg_by_category",
"SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) AS avg_by_category",
);
// unpivot pipe operator basic usage
dialects
.verified_stmt("SELECT * FROM sales |> UNPIVOT(revenue FOR quarter IN (Q1, Q2, Q3, Q4))");
dialects.verified_stmt("SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C))");
dialects.verified_stmt(
"SELECT * FROM metrics |> UNPIVOT(measurement FOR metric_type IN (cpu, memory, disk))",
);
// unpivot pipe operator with multiple columns
dialects.verified_stmt("SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (jan, feb, mar, apr, may, jun))");
dialects.verified_stmt(
"SELECT * FROM report |> UNPIVOT(score FOR subject IN (math, science, english, history))",
);
// unpivot pipe operator mixed with other pipe operators
dialects.verified_stmt("SELECT * FROM sales_data |> WHERE year = 2023 |> UNPIVOT(revenue FOR quarter IN (Q1, Q2, Q3, Q4))");
// unpivot pipe operator with aliases
dialects.verified_stmt("SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) AS unpivoted_sales");
dialects.verified_stmt(
"SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) AS transformed_data",
);
dialects.verified_stmt("SELECT * FROM metrics |> UNPIVOT(measurement FOR metric_type IN (cpu, memory)) AS metric_measurements");
// unpivot pipe operator with implicit aliases (without AS keyword)
dialects.verified_query_with_canonical(
"SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) unpivoted_sales",
"SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) AS unpivoted_sales",
);
dialects.verified_query_with_canonical(
"SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) transformed_data",
"SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) AS transformed_data",
);
// many pipes
dialects.verified_stmt(
"SELECT * FROM CustomerOrders |> AGGREGATE SUM(cost) AS total_cost GROUP BY customer_id, state, item_type |> EXTEND COUNT(*) OVER (PARTITION BY customer_id) AS num_orders |> WHERE num_orders > 1 |> AGGREGATE AVG(total_cost) AS average GROUP BY state DESC, item_type ASC",
);
// join pipe operator - INNER JOIN
dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id");
dialects.verified_stmt("SELECT * FROM users |> INNER JOIN orders ON users.id = orders.user_id");
// join pipe operator - LEFT JOIN
dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id");
dialects.verified_stmt(
"SELECT * FROM users |> LEFT OUTER JOIN orders ON users.id = orders.user_id",
);
// join pipe operator - RIGHT JOIN
dialects.verified_stmt("SELECT * FROM users |> RIGHT JOIN orders ON users.id = orders.user_id");
dialects.verified_stmt(
"SELECT * FROM users |> RIGHT OUTER JOIN orders ON users.id = orders.user_id",
);
// join pipe operator - FULL JOIN
dialects.verified_stmt("SELECT * FROM users |> FULL JOIN orders ON users.id = orders.user_id");
dialects.verified_query_with_canonical(
"SELECT * FROM users |> FULL OUTER JOIN orders ON users.id = orders.user_id",
"SELECT * FROM users |> FULL JOIN orders ON users.id = orders.user_id",
);
// join pipe operator - CROSS JOIN
dialects.verified_stmt("SELECT * FROM users |> CROSS JOIN orders");
// join pipe operator with USING
dialects.verified_query_with_canonical(
"SELECT * FROM users |> JOIN orders USING (user_id)",
"SELECT * FROM users |> JOIN orders USING(user_id)",
);
dialects.verified_query_with_canonical(
"SELECT * FROM users |> LEFT JOIN orders USING (user_id, order_date)",
"SELECT * FROM users |> LEFT JOIN orders USING(user_id, order_date)",
);
// join pipe operator with alias
dialects.verified_query_with_canonical(
"SELECT * FROM users |> JOIN orders o ON users.id = o.user_id",
"SELECT * FROM users |> JOIN orders AS o ON users.id = o.user_id",
);
dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders AS o ON users.id = o.user_id");
// join pipe operator with complex ON condition
dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id AND orders.status = 'active'");
dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id AND orders.amount > 100");
// multiple join pipe operators
dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> JOIN products ON orders.product_id = products.id");
dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id |> RIGHT JOIN products ON orders.product_id = products.id");
// join pipe operator with other pipe operators
dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> WHERE orders.amount > 100");
dialects.verified_stmt("SELECT * FROM users |> WHERE users.active = true |> LEFT JOIN orders ON users.id = orders.user_id");
dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> SELECT users.name, orders.amount");
}
#[test]
fn parse_pipeline_operator_negative_tests() {
    let dialects = all_dialects_where(|d| d.supports_pipe_operator());

    // EXCEPT / INTERSECT pipe operators demand an explicit DISTINCT modifier
    // (BigQuery semantics); plain, ALL, and BY NAME variants without DISTINCT
    // are all rejected with an operator-specific message.
    for (operator, sql) in [
        ("EXCEPT", "SELECT * FROM users |> EXCEPT (SELECT * FROM admins)"),
        ("EXCEPT", "SELECT * FROM users |> EXCEPT ALL (SELECT * FROM admins)"),
        (
            "EXCEPT",
            "SELECT * FROM users |> EXCEPT BY NAME (SELECT * FROM admins)",
        ),
        (
            "EXCEPT",
            "SELECT * FROM users |> EXCEPT ALL BY NAME (SELECT * FROM admins)",
        ),
        (
            "INTERSECT",
            "SELECT * FROM users |> INTERSECT (SELECT * FROM admins)",
        ),
        (
            "INTERSECT",
            "SELECT * FROM users |> INTERSECT ALL (SELECT * FROM admins)",
        ),
        (
            "INTERSECT",
            "SELECT * FROM users |> INTERSECT BY NAME (SELECT * FROM admins)",
        ),
        (
            "INTERSECT",
            "SELECT * FROM users |> INTERSECT ALL BY NAME (SELECT * FROM admins)",
        ),
    ] {
        assert_eq!(
            ParserError::ParserError(format!(
                "{operator} pipe operator requires DISTINCT modifier"
            )),
            dialects.parse_sql_statements(sql).unwrap_err()
        );
    }

    // Malformed CALL / PIVOT / UNPIVOT / JOIN pipe operators must all fail to
    // parse; the exact error message is not pinned here.
    for sql in [
        // CALL: missing function name, missing parentheses, invalid function
        // name, malformed argument list, dangling alias
        "SELECT * FROM users |> CALL",
        "SELECT * FROM users |> CALL my_function",
        "SELECT * FROM users |> CALL 123invalid",
        "SELECT * FROM users |> CALL my_function(,)",
        "SELECT * FROM users |> CALL my_function() AS",
        // PIVOT: missing parentheses, missing FOR, missing IN, empty IN list,
        // dangling alias
        "SELECT * FROM users |> PIVOT SUM(amount) FOR month IN ('Jan')",
        "SELECT * FROM users |> PIVOT(SUM(amount) month IN ('Jan'))",
        "SELECT * FROM users |> PIVOT(SUM(amount) FOR month ('Jan'))",
        "SELECT * FROM users |> PIVOT(SUM(amount) FOR month IN ())",
        "SELECT * FROM users |> PIVOT(SUM(amount) FOR month IN ('Jan')) AS",
        // UNPIVOT: missing parentheses, missing FOR, missing IN, missing value
        // column, missing name column, empty IN list, dangling alias,
        // unbalanced parenthesis
        "SELECT * FROM users |> UNPIVOT value FOR name IN col1, col2",
        "SELECT * FROM users |> UNPIVOT(value name IN (col1, col2))",
        "SELECT * FROM users |> UNPIVOT(value FOR name (col1, col2))",
        "SELECT * FROM users |> UNPIVOT(FOR name IN (col1, col2))",
        "SELECT * FROM users |> UNPIVOT(value FOR IN (col1, col2))",
        "SELECT * FROM users |> UNPIVOT(value FOR name IN ())",
        "SELECT * FROM users |> UNPIVOT(value FOR name IN (col1, col2)) AS",
        "SELECT * FROM users |> UNPIVOT(value FOR name IN (col1, col2)",
        // JOIN: missing table name, CROSS JOIN with an ON/USING constraint,
        // empty USING list, truncated ON condition, USING without parentheses
        "SELECT * FROM users |> JOIN ON users.id = orders.user_id",
        "SELECT * FROM users |> CROSS JOIN orders ON users.id = orders.user_id",
        "SELECT * FROM users |> CROSS JOIN orders USING (user_id)",
        "SELECT * FROM users |> JOIN orders USING ()",
        "SELECT * FROM users |> JOIN orders ON",
        "SELECT * FROM users |> JOIN orders USING user_id",
    ] {
        assert!(dialects.parse_sql_statements(sql).is_err());
    }
}
#[test]
@ -15359,6 +15840,29 @@ fn check_enforced() {
);
}
#[test]
fn join_precedence() {
    // Dialects where unparenthesized join chains are left-associative keep the
    // canonical string flat. NOTE(review): the predicate is double-negated —
    // `all_dialects_except(|d| !flag)` selects exactly the dialects where the
    // flag IS set; `all_dialects_where(|d| flag)` would read more directly.
    all_dialects_except(|d| !d.supports_left_associative_joins_without_parens())
        .verified_query_with_canonical(
            "SELECT *
    FROM t1
    NATURAL JOIN t5
    INNER JOIN t0 ON (t0.v1 + t5.v0) > 0
    WHERE t0.v1 = t1.v0",
            // canonical string without parentheses
            "SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0 WHERE t0.v1 = t1.v0",
        );

    // The remaining dialects canonicalize the same input by parenthesizing the
    // INNER JOIN, i.e. it binds tighter than the preceding NATURAL JOIN.
    all_dialects_except(|d| d.supports_left_associative_joins_without_parens()).verified_query_with_canonical(
        "SELECT *
    FROM t1
    NATURAL JOIN t5
    INNER JOIN t0 ON (t0.v1 + t5.v0) > 0
    WHERE t0.v1 = t1.v0",
        // canonical string with parentheses
        "SELECT * FROM t1 NATURAL JOIN (t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0) WHERE t0.v1 = t1.v0",
    );
}
#[test]
fn parse_create_procedure_with_language() {
let sql = r#"CREATE PROCEDURE test_proc LANGUAGE sql AS BEGIN SELECT 1; END"#;

View file

@ -19,6 +19,7 @@ use sqlparser::ast::helpers::attached_token::AttachedToken;
use sqlparser::ast::*;
use sqlparser::dialect::{DatabricksDialect, GenericDialect};
use sqlparser::parser::ParserError;
use sqlparser::tokenizer::Span;
use test_utils::*;
#[macro_use]
@ -328,7 +329,10 @@ fn data_type_timestamp_ntz() {
databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"),
Expr::TypedString {
data_type: DataType::TimestampNtz,
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned())
value: ValueWithSpan {
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()),
span: Span::empty(),
}
}
);

View file

@ -24,6 +24,7 @@ use test_utils::*;
use sqlparser::ast::*;
use sqlparser::dialect::{DuckDbDialect, GenericDialect};
use sqlparser::parser::ParserError;
fn duckdb() -> TestedDialects {
TestedDialects::new(vec![Box::new(DuckDbDialect {})])
@ -830,3 +831,32 @@ fn parse_use() {
])))
);
}
#[test]
fn test_duckdb_trim() {
    // DuckDB's TRIM accepts extra comma-separated trim-character arguments; a
    // full statement using that form must round-trip unchanged.
    let roundtrip = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#;
    assert_eq!(duckdb().verified_stmt(roundtrip).to_string(), roundtrip);

    // The two-argument form parses into Expr::Trim with `trim_characters` set
    // and no WHERE-style modifier.
    let select = duckdb().verified_only_select("SELECT TRIM('xyz', 'a')");
    let expected = Expr::Trim {
        expr: Box::new(Expr::Value(
            Value::SingleQuotedString("xyz".to_owned()).with_empty_span(),
        )),
        trim_where: None,
        trim_what: None,
        trim_characters: Some(vec![Expr::Value(
            Value::SingleQuotedString("a".to_owned()).with_empty_span(),
        )]),
    };
    assert_eq!(&expected, expr_from_projection(only(&select.projection)));

    // Omitting the comma between the arguments is a parse error.
    assert_eq!(
        ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
        duckdb()
            .parse_sql_statements("SELECT TRIM('xyz' 'a')")
            .unwrap_err()
    );
}

View file

@ -4109,3 +4109,28 @@ fn parse_alter_table_drop_index() {
AlterTableOperation::DropIndex { name } if name.value == "idx_index"
);
}
#[test]
fn parse_json_member_of() {
    // MySQL's MEMBER OF round-trips for a numeric candidate value.
    mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#);

    // A string candidate parses into Expr::MemberOf with the candidate on the
    // `value` side and the JSON array literal on the `array` side.
    let stmt = mysql().verified_stmt(r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#);
    match stmt {
        Statement::Query(query) => {
            let expected = vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf {
                value: Box::new(Expr::Value(
                    Value::SingleQuotedString("ab".to_string()).into(),
                )),
                array: Box::new(Expr::Value(
                    Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string()).into(),
                )),
            }))];
            assert_eq!(query.body.as_select().unwrap().projection, expected);
        }
        _ => panic!("Unexpected statement {stmt}"),
    }
}

View file

@ -606,9 +606,10 @@ fn parse_alter_table_constraints_unique_nulls_distinct() {
.verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)")
{
Statement::AlterTable { operations, .. } => match &operations[0] {
AlterTableOperation::AddConstraint(TableConstraint::Unique {
nulls_distinct, ..
}) => {
AlterTableOperation::AddConstraint {
constraint: TableConstraint::Unique { nulls_distinct, .. },
..
} => {
assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct)
}
_ => unreachable!(),
@ -764,10 +765,7 @@ fn parse_drop_extension() {
#[test]
fn parse_alter_table_alter_column() {
pg().one_statement_parses_to(
"ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'",
"ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'",
);
pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'");
match alter_table_op(
pg().verified_stmt(
@ -783,6 +781,7 @@ fn parse_alter_table_alter_column() {
AlterColumnOperation::SetDataType {
data_type: DataType::Text,
using: Some(using_expr),
had_set: true,
}
);
}
@ -5258,7 +5257,10 @@ fn parse_at_time_zone() {
left: Box::new(Expr::AtTimeZone {
timestamp: Box::new(Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
span: Span::empty(),
},
}),
time_zone: Box::new(Expr::Cast {
kind: CastKind::DoubleColon,
@ -6232,6 +6234,46 @@ fn parse_ts_datatypes() {
}
}
#[test]
fn parse_alter_table_constraint_not_valid() {
    // PostgreSQL allows a trailing NOT VALID on ADD CONSTRAINT; it must be
    // captured in the `not_valid` flag and survive a parse round-trip.
    match pg_and_generic().verified_stmt(
        "ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID",
    ) {
        Statement::AlterTable { operations, .. } => {
            let expected = AlterTableOperation::AddConstraint {
                constraint: TableConstraint::ForeignKey {
                    name: Some("bar".into()),
                    index_name: None,
                    columns: vec!["baz".into()],
                    foreign_table: ObjectName::from(vec!["other".into()]),
                    referred_columns: vec!["ref".into()],
                    on_delete: None,
                    on_update: None,
                    characteristics: None,
                },
                not_valid: true,
            };
            assert_eq!(vec![expected], operations);
        }
        _ => unreachable!(),
    }
}
#[test]
fn parse_alter_table_validate_constraint() {
    // VALIDATE CONSTRAINT (PostgreSQL) parses into a dedicated ALTER TABLE
    // operation carrying just the constraint name.
    let sql = "ALTER TABLE foo VALIDATE CONSTRAINT bar";
    match pg_and_generic().verified_stmt(sql) {
        Statement::AlterTable { operations, .. } => assert_eq!(
            vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }],
            operations
        ),
        _ => unreachable!(),
    }
}
#[test]
fn parse_create_server() {
let test_cases = vec![

View file

@ -402,3 +402,8 @@ fn parse_extract_single_quotes() {
fn parse_string_literal_backslash_escape() {
redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'");
}
#[test]
fn parse_utf8_multibyte_idents() {
    // Multibyte UTF-8 (emoji) identifiers and aliases must round-trip intact.
    let sql = "SELECT 🚀.city AS 🎸 FROM customers AS 🚀";
    redshift().verified_stmt(sql);
}