Compare commits


33 commits

Author SHA1 Message Date
Yoav Cohen
93450cc250
Add Snowflake COPY/REVOKE CURRENT GRANTS option (#1926)
2025-07-07 17:13:57 +02:00
Sergey Olontsev
1a33abda63
Clickhouse: support empty parenthesized options (#1925)
2025-07-06 09:06:20 +02:00
Yoav Cohen
f2fba48a7a
Add support for several Snowflake grant statements (#1922) 2025-07-06 08:58:19 +02:00
Simon Vandel Sillesen
cf9e50474e
Make GenericDialect support trailing commas in projections (#1921) 2025-07-06 08:57:20 +02:00
Yoav Cohen
ed8757f2f0
Align Snowflake dialect to new test of reserved keywords (#1924)
2025-07-05 08:40:35 +02:00
Yoav Cohen
d2466af20a
Add support for dropping multiple columns in Snowflake (#1918) 2025-07-05 08:18:58 +02:00
Yoav Cohen
b0bcc46e22
Add support for NULL escape char in pattern match searches (#1913)
2025-07-04 21:04:51 +02:00
Elia Perantoni
942d747d89
Change tag and policy names to ObjectName (#1892) 2025-07-04 18:21:31 +02:00
Sergey Olontsev
239e30a97c
Support for Postgres CREATE SERVER (#1914)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-07-03 19:04:32 +02:00
feral-dot-io
9020385c02
Add span for Expr::TypedString (#1919) 2025-07-03 18:24:51 +02:00
Yoav Cohen
be2d2f14e7
Add support for MySQL MEMBER OF (#1917) 2025-07-03 18:22:17 +02:00
carl
418b94227a
Postgres: support ADD CONSTRAINT NOT VALID and VALIDATE CONSTRAINT (#1908) 2025-07-03 18:19:26 +02:00
Yoav Cohen
015caca611
Redshift alter column type no set (#1912) 2025-07-03 18:16:21 +02:00
Ryan Schneider
a3398223d7
DuckDB: Add support for multiple TRIM arguments (#1916)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-07-02 14:57:08 +02:00
Yoav Cohen
f32a41a004
Redshift utf8 idents (#1915)
2025-07-01 13:52:29 +02:00
Simon Vandel Sillesen
9ffc546870
Make GenericDialect support from-first syntax (#1911) 2025-07-01 13:19:40 +02:00
Simon Vandel Sillesen
abd80f9ecb
Support remaining pipe operators (#1879)
2025-06-30 17:51:55 +02:00
Dima
3bc94234df
Fix join precedence for non-snowflake queries (#1905)
2025-06-28 20:24:25 +02:00
Sergey Olontsev
50c605a471
Support for Map values in ClickHouse settings (#1896)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-06-28 08:13:11 +02:00
Yoav Cohen
6c38cdcadb
Snowflake: Add support for future grants (#1906)
2025-06-27 21:21:38 +02:00
Ifeanyi Ubah
5f2b5fe7be
Fix clippy lints on 1.88.0 (#1910) 2025-06-27 14:21:17 -04:00
ZacJW
95d16e3b2d
Add support for LANGUAGE clause in CREATE PROCEDURE (#1903) 2025-06-27 18:22:21 +02:00
Elia Perantoni
1bbc05cdff
Snowflake: support multiple column options in CREATE VIEW (#1891)
2025-06-25 16:10:01 +02:00
Elia Perantoni
b2ab0061c1
Fix impl Ord for Ident (#1893)
2025-06-25 12:21:59 +02:00
ZacJW
b9365b3853
Support procedure argmode (#1901)
2025-06-24 08:39:02 +02:00
Denys Tsomenko
44f3be38e5
fix: parse snowflake fetch clause (#1894) 2025-06-24 08:29:44 +02:00
Michael Victor Zink
5d63663bc6
Use IndexColumn in all index definitions (#1900) 2025-06-24 08:18:03 +02:00
Dima
7865de015f
Fix limit in subqueries (#1899)
2025-06-22 09:22:45 +02:00
Mohamed Abdeen
1d0dc7cdd8
Postgres: Add support for text search types (#1889) 2025-06-22 09:02:51 +02:00
Simon Sawert
204d3b484d
Extend exception handling (#1884)
2025-06-21 08:12:07 +02:00
hulk
185a490218
Fix parsing error when having fields after nested struct in BigQuery (#1897)
2025-06-20 16:56:26 +02:00
hulk
b1b379e570
Add support of parsing struct field's options in BigQuery (#1890)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-06-18 07:00:53 +02:00
Andrew Lamb
be30697efb
Add license header check to CI (#1888)
2025-06-17 10:46:58 -04:00
36 changed files with 3104 additions and 673 deletions

.github/workflows/license.yml (new file)

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
name: license
# trigger for all PRs and changes to main
on:
push:
branches:
- main
pull_request:
jobs:
rat:
name: Release Audit Tool (RAT)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: 3.8
- name: Audit licenses
run: ./dev/release/run-rat.sh .


@ -1,7 +1,8 @@
# Files to exclude from the Apache Rat (license) check
.gitignore
.tool-versions
target/*
**.gitignore
rat.txt
dev/release/rat_exclude_files.txt
fuzz/.gitignore
sqlparser_bench/img/flamegraph.svg
**Cargo.lock
filtered_rat.txt


@ -63,7 +63,7 @@ $ cargo run --example cli - [--dialectname]
};
let contents = if filename == "-" {
println!("Parsing from stdin using {:?}", dialect);
println!("Parsing from stdin using {dialect:?}");
let mut buf = Vec::new();
stdin()
.read_to_end(&mut buf)


@ -45,25 +45,24 @@ fn basic_queries(c: &mut Criterion) {
let large_statement = {
let expressions = (0..1000)
.map(|n| format!("FN_{}(COL_{})", n, n))
.map(|n| format!("FN_{n}(COL_{n})"))
.collect::<Vec<_>>()
.join(", ");
let tables = (0..1000)
.map(|n| format!("TABLE_{}", n))
.map(|n| format!("TABLE_{n}"))
.collect::<Vec<_>>()
.join(" JOIN ");
let where_condition = (0..1000)
.map(|n| format!("COL_{} = {}", n, n))
.map(|n| format!("COL_{n} = {n}"))
.collect::<Vec<_>>()
.join(" OR ");
let order_condition = (0..1000)
.map(|n| format!("COL_{} DESC", n))
.map(|n| format!("COL_{n} DESC"))
.collect::<Vec<_>>()
.join(", ");
format!(
"SELECT {} FROM {} WHERE {} ORDER BY {}",
expressions, tables, where_condition, order_condition
"SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
)
};


@ -446,6 +446,14 @@ pub enum DataType {
///
/// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html
GeometricType(GeometricTypeKind),
/// PostgreSQL text search vectors, see [PostgreSQL].
///
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
TsVector,
/// PostgreSQL text search query, see [PostgreSQL].
///
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
TsQuery,
}
impl fmt::Display for DataType {
@ -658,7 +666,7 @@ impl fmt::Display for DataType {
}
DataType::Enum(vals, bits) => {
match bits {
Some(bits) => write!(f, "ENUM{}", bits),
Some(bits) => write!(f, "ENUM{bits}"),
None => write!(f, "ENUM"),
}?;
write!(f, "(")?;
@ -706,16 +714,16 @@ impl fmt::Display for DataType {
}
// ClickHouse
DataType::Nullable(data_type) => {
write!(f, "Nullable({})", data_type)
write!(f, "Nullable({data_type})")
}
DataType::FixedString(character_length) => {
write!(f, "FixedString({})", character_length)
write!(f, "FixedString({character_length})")
}
DataType::LowCardinality(data_type) => {
write!(f, "LowCardinality({})", data_type)
write!(f, "LowCardinality({data_type})")
}
DataType::Map(key_data_type, value_data_type) => {
write!(f, "Map({}, {})", key_data_type, value_data_type)
write!(f, "Map({key_data_type}, {value_data_type})")
}
DataType::Tuple(fields) => {
write!(f, "Tuple({})", display_comma_separated(fields))
@ -737,7 +745,9 @@ impl fmt::Display for DataType {
DataType::NamedTable { name, columns } => {
write!(f, "{} TABLE ({})", name, display_comma_separated(columns))
}
DataType::GeometricType(kind) => write!(f, "{}", kind),
DataType::GeometricType(kind) => write!(f, "{kind}"),
DataType::TsVector => write!(f, "TSVECTOR"),
DataType::TsQuery => write!(f, "TSQUERY"),
}
}
}
@ -932,7 +942,7 @@ impl fmt::Display for CharacterLength {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CharacterLength::IntegerLength { length, unit } => {
write!(f, "{}", length)?;
write!(f, "{length}")?;
if let Some(unit) = unit {
write!(f, " {unit}")?;
}
@ -987,7 +997,7 @@ impl fmt::Display for BinaryLength {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
BinaryLength::IntegerLength { length } => {
write!(f, "{}", length)?;
write!(f, "{length}")?;
}
BinaryLength::Max => {
write!(f, "MAX")?;
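The new `TsVector` and `TsQuery` variants let the Postgres text search types round-trip through the AST. A minimal sketch, assuming a sqlparser build that includes this change (table and column names are illustrative):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // TSVECTOR / TSQUERY columns should map to DataType::TsVector / DataType::TsQuery.
    let sql = "CREATE TABLE docs (body TEXT, search TSVECTOR, q TSQUERY)";
    let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("parse failed");
    // Display is expected to reproduce the keywords shown above.
    println!("{}", stmts[0]);
}
```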


@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation {
in_database,
} => {
if let Some(database_name) = in_database {
write!(f, "IN DATABASE {} ", database_name)?;
write!(f, "IN DATABASE {database_name} ")?;
}
match config_value {
@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation {
in_database,
} => {
if let Some(database_name) = in_database {
write!(f, "IN DATABASE {} ", database_name)?;
write!(f, "IN DATABASE {database_name} ")?;
}
match config_name {
@ -218,15 +218,15 @@ impl fmt::Display for Use {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("USE ")?;
match self {
Use::Catalog(name) => write!(f, "CATALOG {}", name),
Use::Schema(name) => write!(f, "SCHEMA {}", name),
Use::Database(name) => write!(f, "DATABASE {}", name),
Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name),
Use::Role(name) => write!(f, "ROLE {}", name),
Use::Catalog(name) => write!(f, "CATALOG {name}"),
Use::Schema(name) => write!(f, "SCHEMA {name}"),
Use::Database(name) => write!(f, "DATABASE {name}"),
Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
Use::Role(name) => write!(f, "ROLE {name}"),
Use::SecondaryRoles(secondary_roles) => {
write!(f, "SECONDARY ROLES {}", secondary_roles)
write!(f, "SECONDARY ROLES {secondary_roles}")
}
Use::Object(name) => write!(f, "{}", name),
Use::Object(name) => write!(f, "{name}"),
Use::Default => write!(f, "DEFAULT"),
}
}


@ -30,11 +30,11 @@ use sqlparser_derive::{Visit, VisitMut};
use crate::ast::value::escape_single_quote_string;
use crate::ast::{
display_comma_separated, display_separated, CommentDef, CreateFunctionBody,
display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody,
CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull,
FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName,
OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value,
ValueWithSpan,
FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition,
ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag,
Value, ValueWithSpan,
};
use crate::keywords::Keyword;
use crate::tokenizer::Token;
@ -57,7 +57,7 @@ impl fmt::Display for ReplicaIdentity {
ReplicaIdentity::None => f.write_str("NONE"),
ReplicaIdentity::Full => f.write_str("FULL"),
ReplicaIdentity::Default => f.write_str("DEFAULT"),
ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {}", idx),
ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"),
}
}
}
@ -67,8 +67,11 @@ impl fmt::Display for ReplicaIdentity {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum AlterTableOperation {
/// `ADD <table_constraint>`
AddConstraint(TableConstraint),
/// `ADD <table_constraint> [NOT VALID]`
AddConstraint {
constraint: TableConstraint,
not_valid: bool,
},
/// `ADD [COLUMN] [IF NOT EXISTS] <column_def>`
AddColumn {
/// `[COLUMN]`.
@ -137,10 +140,10 @@ pub enum AlterTableOperation {
name: Ident,
drop_behavior: Option<DropBehavior>,
},
/// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ CASCADE ]`
/// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ , <column_name>, ... ] [ CASCADE ]`
DropColumn {
has_column_keyword: bool,
column_name: Ident,
column_names: Vec<Ident>,
if_exists: bool,
drop_behavior: Option<DropBehavior>,
},
@ -344,6 +347,10 @@ pub enum AlterTableOperation {
equals: bool,
value: ValueWithSpan,
},
/// `VALIDATE CONSTRAINT <name>`
ValidateConstraint {
name: Ident,
},
}
/// An `ALTER Policy` (`Statement::AlterPolicy`) operation
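Together with the multi-column `DropColumn` change, the new `AddConstraint { not_valid }` and `ValidateConstraint` variants cover statements like the ones below. A sketch, assuming these forms parse under the dialects named in the commit titles (Postgres for #1908, Snowflake for #1918):

```rust
use sqlparser::dialect::{PostgreSqlDialect, SnowflakeDialect};
use sqlparser::parser::Parser;

fn main() {
    // Postgres: add a constraint without checking existing rows, then validate it later (#1908).
    for sql in [
        "ALTER TABLE orders ADD CONSTRAINT total_positive CHECK (total > 0) NOT VALID",
        "ALTER TABLE orders VALIDATE CONSTRAINT total_positive",
    ] {
        println!("{}", Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("parse failed")[0]);
    }

    // Snowflake: drop several columns in one ALTER TABLE (#1918).
    let sql = "ALTER TABLE orders DROP COLUMN discount, coupon_code";
    println!("{}", Parser::parse_sql(&SnowflakeDialect {}, sql).expect("parse failed")[0]);
}
```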
@ -450,7 +457,7 @@ pub enum Owner {
impl fmt::Display for Owner {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Owner::Ident(ident) => write!(f, "{}", ident),
Owner::Ident(ident) => write!(f, "{ident}"),
Owner::CurrentRole => write!(f, "CURRENT_ROLE"),
Owner::CurrentUser => write!(f, "CURRENT_USER"),
Owner::SessionUser => write!(f, "SESSION_USER"),
@ -494,7 +501,16 @@ impl fmt::Display for AlterTableOperation {
display_separated(new_partitions, " "),
ine = if *if_not_exists { " IF NOT EXISTS" } else { "" }
),
AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"),
AlterTableOperation::AddConstraint {
not_valid,
constraint,
} => {
write!(f, "ADD {constraint}")?;
if *not_valid {
write!(f, " NOT VALID")?;
}
Ok(())
}
AlterTableOperation::AddColumn {
column_keyword,
if_not_exists,
@ -525,7 +541,7 @@ impl fmt::Display for AlterTableOperation {
if *if_not_exists {
write!(f, " IF NOT EXISTS")?;
}
write!(f, " {} ({})", name, query)
write!(f, " {name} ({query})")
}
AlterTableOperation::Algorithm { equals, algorithm } => {
write!(
@ -540,7 +556,7 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)
write!(f, " {name}")
}
AlterTableOperation::MaterializeProjection {
if_exists,
@ -551,9 +567,9 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)?;
write!(f, " {name}")?;
if let Some(partition) = partition {
write!(f, " IN PARTITION {}", partition)?;
write!(f, " IN PARTITION {partition}")?;
}
Ok(())
}
@ -566,9 +582,9 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)?;
write!(f, " {name}")?;
if let Some(partition) = partition {
write!(f, " IN PARTITION {}", partition)?;
write!(f, " IN PARTITION {partition}")?;
}
Ok(())
}
@ -615,7 +631,7 @@ impl fmt::Display for AlterTableOperation {
AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"),
AlterTableOperation::DropColumn {
has_column_keyword,
column_name,
column_names: column_name,
if_exists,
drop_behavior,
} => write!(
@ -623,7 +639,7 @@ impl fmt::Display for AlterTableOperation {
"DROP {}{}{}{}",
if *has_column_keyword { "COLUMN " } else { "" },
if *if_exists { "IF EXISTS " } else { "" },
column_name,
display_comma_separated(column_name),
match drop_behavior {
None => "",
Some(DropBehavior::Restrict) => " RESTRICT",
@ -772,6 +788,9 @@ impl fmt::Display for AlterTableOperation {
AlterTableOperation::ReplicaIdentity { identity } => {
write!(f, "REPLICA IDENTITY {identity}")
}
AlterTableOperation::ValidateConstraint { name } => {
write!(f, "VALIDATE CONSTRAINT {name}")
}
}
}
}
@ -893,7 +912,10 @@ pub enum AlterColumnOperation {
data_type: DataType,
/// PostgreSQL specific
using: Option<Expr>,
/// Set to true if the statement includes the `SET DATA TYPE` keywords
had_set: bool,
},
/// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]`
///
/// Note: this is a PostgreSQL-specific operation.
@ -914,12 +936,19 @@ impl fmt::Display for AlterColumnOperation {
AlterColumnOperation::DropDefault => {
write!(f, "DROP DEFAULT")
}
AlterColumnOperation::SetDataType { data_type, using } => {
if let Some(expr) = using {
write!(f, "SET DATA TYPE {data_type} USING {expr}")
} else {
write!(f, "SET DATA TYPE {data_type}")
AlterColumnOperation::SetDataType {
data_type,
using,
had_set,
} => {
if *had_set {
write!(f, "SET DATA ")?;
}
write!(f, "TYPE {data_type}")?;
if let Some(expr) = using {
write!(f, " USING {expr}")?;
}
Ok(())
}
AlterColumnOperation::AddGenerated {
generated_as,
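The new `had_set` flag records whether the statement used the long `SET DATA TYPE` spelling or the bare `TYPE` form, so either input re-renders the way it was written. A sketch, assuming the Redshift dialect accepts the short form as the #1912 title suggests:

```rust
use sqlparser::dialect::RedshiftSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Short form (no SET DATA): had_set should be false.
    // Long form: had_set should be true; both are expected to print back unchanged.
    for sql in [
        "ALTER TABLE users ALTER COLUMN note TYPE VARCHAR(500)",
        "ALTER TABLE users ALTER COLUMN note SET DATA TYPE VARCHAR(500)",
    ] {
        println!("{}", Parser::parse_sql(&RedshiftSqlDialect {}, sql).expect("parse failed")[0]);
    }
}
```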
@ -979,7 +1008,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Identifiers of the columns that are unique.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
index_options: Vec<IndexOption>,
characteristics: Option<ConstraintCharacteristics>,
/// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]`
@ -1015,7 +1044,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Identifiers of the columns that form the primary key.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
index_options: Vec<IndexOption>,
characteristics: Option<ConstraintCharacteristics>,
},
@ -1060,7 +1089,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Referred column identifier list.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
},
/// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same,
/// and MySQL displays both the same way, it is part of this definition as well.
@ -1083,7 +1112,7 @@ pub enum TableConstraint {
/// Optional index name.
opt_index_name: Option<Ident>,
/// Referred column identifier list.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
},
}
@ -1168,7 +1197,7 @@ impl fmt::Display for TableConstraint {
write!(f, " ON UPDATE {action}")?;
}
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
@ -1308,7 +1337,7 @@ impl fmt::Display for IndexType {
Self::SPGiST => write!(f, "SPGIST"),
Self::BRIN => write!(f, "BRIN"),
Self::Bloom => write!(f, "BLOOM"),
Self::Custom(name) => write!(f, "{}", name),
Self::Custom(name) => write!(f, "{name}"),
}
}
}
@ -1367,12 +1396,17 @@ impl fmt::Display for NullsDistinctOption {
pub struct ProcedureParam {
pub name: Ident,
pub data_type: DataType,
pub mode: Option<ArgMode>,
}
impl fmt::Display for ProcedureParam {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(mode) = &self.mode {
write!(f, "{mode} {} {}", self.name, self.data_type)
} else {
write!(f, "{} {}", self.name, self.data_type)
}
}
}
/// SQL column definition
@ -1421,17 +1455,41 @@ impl fmt::Display for ColumnDef {
pub struct ViewColumnDef {
pub name: Ident,
pub data_type: Option<DataType>,
pub options: Option<Vec<ColumnOption>>,
pub options: Option<ColumnOptions>,
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum ColumnOptions {
CommaSeparated(Vec<ColumnOption>),
SpaceSeparated(Vec<ColumnOption>),
}
impl ColumnOptions {
pub fn as_slice(&self) -> &[ColumnOption] {
match self {
ColumnOptions::CommaSeparated(options) => options.as_slice(),
ColumnOptions::SpaceSeparated(options) => options.as_slice(),
}
}
}
impl fmt::Display for ViewColumnDef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.name)?;
if let Some(data_type) = self.data_type.as_ref() {
write!(f, " {}", data_type)?;
write!(f, " {data_type}")?;
}
if let Some(options) = self.options.as_ref() {
write!(f, " {}", display_comma_separated(options.as_slice()))?;
match options {
ColumnOptions::CommaSeparated(column_options) => {
write!(f, " {}", display_comma_separated(column_options.as_slice()))?;
}
ColumnOptions::SpaceSeparated(column_options) => {
write!(f, " {}", display_separated(column_options.as_slice(), " "))?
}
}
}
Ok(())
}
@ -1651,7 +1709,7 @@ pub struct ColumnPolicyProperty {
/// ```
/// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table
pub with: bool,
pub policy_name: Ident,
pub policy_name: ObjectName,
pub using_columns: Option<Vec<Ident>>,
}
@ -1816,7 +1874,7 @@ impl fmt::Display for ColumnOption {
} => {
write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?;
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
@ -1838,7 +1896,7 @@ impl fmt::Display for ColumnOption {
write!(f, " ON UPDATE {action}")?;
}
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
@ -1898,7 +1956,7 @@ impl fmt::Display for ColumnOption {
write!(f, "{parameters}")
}
OnConflict(keyword) => {
write!(f, "ON CONFLICT {:?}", keyword)?;
write!(f, "ON CONFLICT {keyword:?}")?;
Ok(())
}
Policy(parameters) => {


@ -55,7 +55,7 @@ impl Display for IndexColumn {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.column)?;
if let Some(operator_class) = &self.operator_class {
write!(f, " {}", operator_class)?;
write!(f, " {operator_class}")?;
}
Ok(())
}
@ -266,7 +266,7 @@ impl Display for CreateTable {
name = self.name,
)?;
if let Some(on_cluster) = &self.on_cluster {
write!(f, " ON CLUSTER {}", on_cluster)?;
write!(f, " ON CLUSTER {on_cluster}")?;
}
if !self.columns.is_empty() || !self.constraints.is_empty() {
f.write_str(" (")?;
@ -383,15 +383,15 @@ impl Display for CreateTable {
match &self.table_options {
options @ CreateTableOptions::With(_)
| options @ CreateTableOptions::Plain(_)
| options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?,
| options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
_ => (),
}
if let Some(primary_key) = &self.primary_key {
write!(f, " PRIMARY KEY {}", primary_key)?;
write!(f, " PRIMARY KEY {primary_key}")?;
}
if let Some(order_by) = &self.order_by {
write!(f, " ORDER BY {}", order_by)?;
write!(f, " ORDER BY {order_by}")?;
}
if let Some(inherits) = &self.inherits {
write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
@ -403,7 +403,7 @@ impl Display for CreateTable {
write!(f, " CLUSTER BY {cluster_by}")?;
}
if let options @ CreateTableOptions::Options(_) = &self.table_options {
write!(f, " {}", options)?;
write!(f, " {options}")?;
}
if let Some(external_volume) = self.external_volume.as_ref() {
write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;


@ -67,7 +67,7 @@ impl fmt::Display for KeyValueOptions {
} else {
f.write_str(" ")?;
}
write!(f, "{}", option)?;
write!(f, "{option}")?;
}
}
Ok(())


@ -28,6 +28,7 @@ use helpers::{
stmt_data_loading::{FileStagingCommand, StageLoadSelectItemKind},
};
use core::cmp::Ordering;
use core::ops::Deref;
use core::{
fmt::{self, Display},
@ -60,13 +61,14 @@ pub use self::ddl::{
AlterColumnOperation, AlterConnectorOwner, AlterIndexOperation, AlterPolicyOperation,
AlterTableAlgorithm, AlterTableLock, AlterTableOperation, AlterType, AlterTypeAddValue,
AlterTypeAddValuePosition, AlterTypeOperation, AlterTypeRename, AlterTypeRenameValue,
ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnPolicy, ColumnPolicyProperty,
ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction, Deduplicate,
DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode, IdentityParameters,
IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder,
IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner, Partition,
ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint, TagsColumnOption,
UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, ViewColumnDef,
ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnOptions, ColumnPolicy,
ColumnPolicyProperty, ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction,
Deduplicate, DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode,
IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner,
Partition, ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint,
TagsColumnOption, UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation,
ViewColumnDef,
};
pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert};
pub use self::operator::{BinaryOperator, UnaryOperator};
@ -172,7 +174,7 @@ fn format_statement_list(f: &mut fmt::Formatter, statements: &[Statement]) -> fm
}
/// An identifier, decomposed into its value or character data and the quote style.
#[derive(Debug, Clone, PartialOrd, Ord)]
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Ident {
@ -214,6 +216,35 @@ impl core::hash::Hash for Ident {
impl Eq for Ident {}
impl PartialOrd for Ident {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Ident {
fn cmp(&self, other: &Self) -> Ordering {
let Ident {
value,
quote_style,
// exhaustiveness check; we ignore spans in ordering
span: _,
} = self;
let Ident {
value: other_value,
quote_style: other_quote_style,
// exhaustiveness check; we ignore spans in ordering
span: _,
} = other;
// First compare by value, then by quote_style
value
.cmp(other_value)
.then_with(|| quote_style.cmp(other_quote_style))
}
}
impl Ident {
/// Create a new identifier with the given value and no quotes and an empty span.
pub fn new<S>(value: S) -> Self
@ -326,7 +357,7 @@ impl ObjectNamePart {
impl fmt::Display for ObjectNamePart {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ObjectNamePart::Identifier(ident) => write!(f, "{}", ident),
ObjectNamePart::Identifier(ident) => write!(f, "{ident}"),
}
}
}
@ -428,14 +459,22 @@ impl fmt::Display for Interval {
pub struct StructField {
pub field_name: Option<Ident>,
pub field_type: DataType,
/// Struct field options.
/// See [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#column_name_and_column_schema)
pub options: Option<Vec<SqlOption>>,
}
impl fmt::Display for StructField {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(name) = &self.field_name {
write!(f, "{name} {}", self.field_type)
write!(f, "{name} {}", self.field_type)?;
} else {
write!(f, "{}", self.field_type)
write!(f, "{}", self.field_type)?;
}
if let Some(options) = &self.options {
write!(f, " OPTIONS({})", display_separated(options, ", "))
} else {
Ok(())
}
}
}
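With `options` on `StructField`, BigQuery struct fields can carry an `OPTIONS(...)` clause. A sketch, assuming the field-level `description` option parses as shown (#1890):

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // A struct field with OPTIONS(...), followed by another field (#1890).
    let sql = "CREATE TABLE t (s STRUCT<a INT64 OPTIONS(description = 'nested field'), b STRING>)";
    println!("{}", Parser::parse_sql(&BigQueryDialect {}, sql).expect("parse failed")[0]);
}
```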
@ -740,7 +779,7 @@ pub enum Expr {
/// `[ NOT ] IN (SELECT ...)`
InSubquery {
expr: Box<Expr>,
subquery: Box<SetExpr>,
subquery: Box<Query>,
negated: bool,
},
/// `[ NOT ] IN UNNEST(array_expression)`
@ -770,7 +809,7 @@ pub enum Expr {
any: bool,
expr: Box<Expr>,
pattern: Box<Expr>,
escape_char: Option<String>,
escape_char: Option<Value>,
},
/// `ILIKE` (case-insensitive `LIKE`)
ILike {
@ -780,14 +819,14 @@ pub enum Expr {
any: bool,
expr: Box<Expr>,
pattern: Box<Expr>,
escape_char: Option<String>,
escape_char: Option<Value>,
},
/// SIMILAR TO regex
SimilarTo {
negated: bool,
expr: Box<Expr>,
pattern: Box<Expr>,
escape_char: Option<String>,
escape_char: Option<Value>,
},
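Because `escape_char` is now an `Option<Value>` rather than an `Option<String>`, `ESCAPE NULL` is representable and prints without quotes. A sketch, assuming the generic dialect accepts this form (#1913):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // The escape value is kept as a Value, so NULL and quoted characters both survive round-trips.
    let sql = "SELECT * FROM t WHERE name LIKE 'J%' ESCAPE NULL";
    println!("{}", Parser::parse_sql(&GenericDialect {}, sql).expect("parse failed")[0]);
}
```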
/// MySQL: RLIKE regex or REGEXP regex
RLike {
@ -957,7 +996,7 @@ pub enum Expr {
data_type: DataType,
/// The value of the constant.
/// Hint: you can unwrap the string value using `value.into_string()`.
value: Value,
value: ValueWithSpan,
},
/// Scalar function call e.g. `LEFT(foo, 5)`
Function(Function),
@ -1085,6 +1124,8 @@ pub enum Expr {
/// [Databricks](https://docs.databricks.com/en/sql/language-manual/sql-ref-lambda-functions.html)
/// [DuckDb](https://duckdb.org/docs/sql/functions/lambda.html)
Lambda(LambdaFunction),
/// Checks membership of a value in a JSON array
MemberOf(MemberOf),
}
impl Expr {
@ -1171,8 +1212,8 @@ pub enum AccessExpr {
impl fmt::Display for AccessExpr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AccessExpr::Dot(expr) => write!(f, ".{}", expr),
AccessExpr::Subscript(subscript) => write!(f, "[{}]", subscript),
AccessExpr::Dot(expr) => write!(f, ".{expr}"),
AccessExpr::Subscript(subscript) => write!(f, "[{subscript}]"),
}
}
}
@ -1374,12 +1415,12 @@ impl fmt::Display for Expr {
match self {
Expr::Identifier(s) => write!(f, "{s}"),
Expr::Wildcard(_) => f.write_str("*"),
Expr::QualifiedWildcard(prefix, _) => write!(f, "{}.*", prefix),
Expr::QualifiedWildcard(prefix, _) => write!(f, "{prefix}.*"),
Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")),
Expr::CompoundFieldAccess { root, access_chain } => {
write!(f, "{}", root)?;
write!(f, "{root}")?;
for field in access_chain {
write!(f, "{}", field)?;
write!(f, "{field}")?;
}
Ok(())
}
@ -1447,7 +1488,7 @@ impl fmt::Display for Expr {
} => match escape_char {
Some(ch) => write!(
f,
"{} {}LIKE {}{} ESCAPE '{}'",
"{} {}LIKE {}{} ESCAPE {}",
expr,
if *negated { "NOT " } else { "" },
if *any { "ANY " } else { "" },
@ -1472,7 +1513,7 @@ impl fmt::Display for Expr {
} => match escape_char {
Some(ch) => write!(
f,
"{} {}ILIKE {}{} ESCAPE '{}'",
"{} {}ILIKE {}{} ESCAPE {}",
expr,
if *negated { "NOT " } else { "" },
if *any { "ANY" } else { "" },
@ -1508,7 +1549,7 @@ impl fmt::Display for Expr {
} => {
let not_ = if *negated { "NOT " } else { "" };
if form.is_none() {
write!(f, "{} IS {}NORMALIZED", expr, not_)
write!(f, "{expr} IS {not_}NORMALIZED")
} else {
write!(
f,
@ -1527,7 +1568,7 @@ impl fmt::Display for Expr {
} => match escape_char {
Some(ch) => write!(
f,
"{} {}SIMILAR TO {} ESCAPE '{}'",
"{} {}SIMILAR TO {} ESCAPE {}",
expr,
if *negated { "NOT " } else { "" },
pattern,
@ -1830,7 +1871,7 @@ impl fmt::Display for Expr {
}
}
Expr::Named { expr, name } => {
write!(f, "{} AS {}", expr, name)
write!(f, "{expr} AS {name}")
}
Expr::Dictionary(fields) => {
write!(f, "{{{}}}", display_comma_separated(fields))
@ -1873,6 +1914,7 @@ impl fmt::Display for Expr {
}
Expr::Prior(expr) => write!(f, "PRIOR {expr}"),
Expr::Lambda(lambda) => write!(f, "{lambda}"),
Expr::MemberOf(member_of) => write!(f, "{member_of}"),
}
}
}
@ -2386,7 +2428,7 @@ impl fmt::Display for ConditionalStatements {
}
Ok(())
}
ConditionalStatements::BeginEnd(bes) => write!(f, "{}", bes),
ConditionalStatements::BeginEnd(bes) => write!(f, "{bes}"),
}
}
}
@ -2906,9 +2948,7 @@ impl Display for Set {
write!(
f,
"SET {modifier}ROLE {role_name}",
modifier = context_modifier
.map(|m| format!("{}", m))
.unwrap_or_default()
modifier = context_modifier.map(|m| format!("{m}")).unwrap_or_default()
)
}
Self::SetSessionParam(kind) => write!(f, "SET {kind}"),
@ -2941,7 +2981,7 @@ impl Display for Set {
charset_name,
collation_name,
} => {
write!(f, "SET NAMES {}", charset_name)?;
write!(f, "SET NAMES {charset_name}")?;
if let Some(collation) = collation_name {
f.write_str(" COLLATE ")?;
@ -2964,7 +3004,7 @@ impl Display for Set {
write!(
f,
"SET {}{}{} = {}",
scope.map(|s| format!("{}", s)).unwrap_or_default(),
scope.map(|s| format!("{s}")).unwrap_or_default(),
if *hivevar { "HIVEVAR:" } else { "" },
variable,
display_comma_separated(values)
@ -2982,6 +3022,36 @@ impl From<Set> for Statement {
}
}
/// A representation of a `WHEN` arm with all the identifiers caught and the statements to execute
/// for the arm.
///
/// Snowflake: <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
/// BigQuery: <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct ExceptionWhen {
pub idents: Vec<Ident>,
pub statements: Vec<Statement>,
}
impl Display for ExceptionWhen {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"WHEN {idents} THEN",
idents = display_separated(&self.idents, " OR ")
)?;
if !self.statements.is_empty() {
write!(f, " ")?;
format_statement_list(f, &self.statements)?;
}
Ok(())
}
}
/// A top-level statement (SELECT, INSERT, CREATE, etc.)
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
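The `exception` field replaces the former single `EXCEPTION WHEN ERROR THEN` list with one `ExceptionWhen` arm per `WHEN ... THEN` clause. A sketch built from the documented example, assuming the Snowflake dialect parses the block as written (#1884):

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each WHEN ... THEN clause, including `WHEN A OR B THEN`, becomes one ExceptionWhen arm.
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN EXCEPTION_1 THEN SELECT 2; \
               WHEN EXCEPTION_2 OR EXCEPTION_3 THEN SELECT 3; WHEN OTHER THEN SELECT 4; END";
    println!("{}", Parser::parse_sql(&SnowflakeDialect {}, sql).expect("parse failed")[0]);
}
```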
@ -3248,6 +3318,8 @@ pub enum Statement {
secret_type: Ident,
options: Vec<SecretOption>,
},
/// A `CREATE SERVER` statement.
CreateServer(CreateServerStatement),
/// ```sql
/// CREATE POLICY
/// ```
@ -3670,17 +3742,20 @@ pub enum Statement {
/// END;
/// ```
statements: Vec<Statement>,
/// Statements of an exception clause.
/// Exception handling with exception clauses.
/// Example:
/// ```sql
/// BEGIN
/// SELECT 1;
/// EXCEPTION WHEN ERROR THEN
/// EXCEPTION
/// WHEN EXCEPTION_1 THEN
/// SELECT 2;
/// WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
/// SELECT 3;
/// END;
/// WHEN OTHER THEN
/// SELECT 4;
/// ```
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
exception_statements: Option<Vec<Statement>>,
/// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
exception: Option<Vec<ExceptionWhen>>,
/// TRUE if the statement has an `END` keyword.
has_end_keyword: bool,
},
@ -3873,6 +3948,7 @@ pub enum Statement {
or_alter: bool,
name: ObjectName,
params: Option<Vec<ProcedureParam>>,
language: Option<Ident>,
body: ConditionalStatements,
},
/// ```sql
@ -3920,6 +3996,7 @@ pub enum Statement {
with_grant_option: bool,
as_grantor: Option<Ident>,
granted_by: Option<Ident>,
current_grants: Option<CurrentGrantsKind>,
},
/// ```sql
/// DENY privileges ON object TO grantees
@ -4173,7 +4250,7 @@ pub enum Statement {
/// ```sql
/// NOTIFY channel [ , payload ]
/// ```
/// send a notification event together with an optional “payload” string to channel
/// send a notification event together with an optional "payload" string to channel
///
/// See Postgres <https://www.postgresql.org/docs/current/sql-notify.html>
NOTIFY {
@ -4236,6 +4313,28 @@ pub enum Statement {
Return(ReturnStatement),
}
/// ```sql
/// {COPY | REVOKE} CURRENT GRANTS
/// ```
///
/// - [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership#optional-parameters)
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum CurrentGrantsKind {
CopyCurrentGrants,
RevokeCurrentGrants,
}
impl fmt::Display for CurrentGrantsKind {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
CurrentGrantsKind::CopyCurrentGrants => write!(f, "COPY CURRENT GRANTS"),
CurrentGrantsKind::RevokeCurrentGrants => write!(f, "REVOKE CURRENT GRANTS"),
}
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
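`CurrentGrantsKind` only affects how a `GRANT` statement renders; the two variants print the clauses shown in the doc comment. A trivial check of the Display output:

```rust
use sqlparser::ast::CurrentGrantsKind;

fn main() {
    // Appears at the tail of GRANT OWNERSHIP statements, per the Snowflake docs linked above.
    assert_eq!(CurrentGrantsKind::CopyCurrentGrants.to_string(), "COPY CURRENT GRANTS");
    assert_eq!(CurrentGrantsKind::RevokeCurrentGrants.to_string(), "REVOKE CURRENT GRANTS");
    println!("ok");
}
```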
@ -4332,7 +4431,7 @@ impl fmt::Display for Statement {
write!(f, "{describe_alias} ")?;
if let Some(format) = hive_format {
write!(f, "{} ", format)?;
write!(f, "{format} ")?;
}
if *has_table_keyword {
write!(f, "TABLE ")?;
@ -4776,6 +4875,7 @@ impl fmt::Display for Statement {
name,
or_alter,
params,
language,
body,
} => {
write!(
@ -4791,6 +4891,10 @@ impl fmt::Display for Statement {
}
}
if let Some(language) = language {
write!(f, " LANGUAGE {language}")?;
}
write!(f, " AS {body}")
}
Statement::CreateMacro {
@ -5099,6 +5203,9 @@ impl fmt::Display for Statement {
write!(f, " )")?;
Ok(())
}
Statement::CreateServer(stmt) => {
write!(f, "{stmt}")
}
Statement::CreatePolicy {
name,
table_name,
@ -5163,7 +5270,7 @@ impl fmt::Display for Statement {
if *only {
write!(f, "ONLY ")?;
}
write!(f, "{name} ", name = name)?;
write!(f, "{name} ")?;
if let Some(cluster) = on_cluster {
write!(f, "ON CLUSTER {cluster} ")?;
}
@ -5241,7 +5348,7 @@ impl fmt::Display for Statement {
)?;
if !session_params.options.is_empty() {
if *set {
write!(f, " {}", session_params)?;
write!(f, " {session_params}")?;
} else {
let options = session_params
.options
@ -5275,7 +5382,7 @@ impl fmt::Display for Statement {
if *purge { " PURGE" } else { "" },
)?;
if let Some(table_name) = table.as_ref() {
write!(f, " ON {}", table_name)?;
write!(f, " ON {table_name}")?;
};
Ok(())
}
@ -5525,12 +5632,12 @@ impl fmt::Display for Statement {
transaction,
modifier,
statements,
exception_statements,
exception,
has_end_keyword,
} => {
if *syntax_begin {
if let Some(modifier) = *modifier {
write!(f, "BEGIN {}", modifier)?;
write!(f, "BEGIN {modifier}")?;
} else {
write!(f, "BEGIN")?;
}
@ -5547,11 +5654,10 @@ impl fmt::Display for Statement {
write!(f, " ")?;
format_statement_list(f, statements)?;
}
if let Some(exception_statements) = exception_statements {
write!(f, " EXCEPTION WHEN ERROR THEN")?;
if !exception_statements.is_empty() {
write!(f, " ")?;
format_statement_list(f, exception_statements)?;
if let Some(exception_when) = exception {
write!(f, " EXCEPTION")?;
for when in exception_when {
write!(f, " {when}")?;
}
}
if *has_end_keyword {
@ -5567,7 +5673,7 @@ impl fmt::Display for Statement {
if *end_syntax {
write!(f, "END")?;
if let Some(modifier) = *modifier {
write!(f, " {}", modifier)?;
write!(f, " {modifier}")?;
}
if *chain {
write!(f, " AND CHAIN")?;
@ -5632,6 +5738,7 @@ impl fmt::Display for Statement {
with_grant_option,
as_grantor,
granted_by,
current_grants,
} => {
write!(f, "GRANT {privileges} ")?;
if let Some(objects) = objects {
@ -5641,6 +5748,9 @@ impl fmt::Display for Statement {
if *with_grant_option {
write!(f, " WITH GRANT OPTION")?;
}
if let Some(current_grants) = current_grants {
write!(f, " {current_grants}")?;
}
if let Some(grantor) = as_grantor {
write!(f, " AS {grantor}")?;
}
@ -5666,7 +5776,7 @@ impl fmt::Display for Statement {
write!(f, " GRANTED BY {grantor}")?;
}
if let Some(cascade) = cascade {
write!(f, " {}", cascade)?;
write!(f, " {cascade}")?;
}
Ok(())
}
@ -5845,13 +5955,13 @@ impl fmt::Display for Statement {
if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" },
)?;
if !directory_table_params.options.is_empty() {
write!(f, " DIRECTORY=({})", directory_table_params)?;
write!(f, " DIRECTORY=({directory_table_params})")?;
}
if !file_format.options.is_empty() {
write!(f, " FILE_FORMAT=({})", file_format)?;
write!(f, " FILE_FORMAT=({file_format})")?;
}
if !copy_options.options.is_empty() {
write!(f, " COPY_OPTIONS=({})", copy_options)?;
write!(f, " COPY_OPTIONS=({copy_options})")?;
}
if comment.is_some() {
write!(f, " COMMENT='{}'", comment.as_ref().unwrap())?;
@ -5874,7 +5984,7 @@ impl fmt::Display for Statement {
validation_mode,
partition,
} => {
write!(f, "COPY INTO {}", into)?;
write!(f, "COPY INTO {into}")?;
if let Some(into_columns) = into_columns {
write!(f, " ({})", display_comma_separated(into_columns))?;
}
@ -5890,12 +6000,12 @@ impl fmt::Display for Statement {
)?;
}
if let Some(from_obj_alias) = from_obj_alias {
write!(f, " AS {}", from_obj_alias)?;
write!(f, " AS {from_obj_alias}")?;
}
write!(f, ")")?;
} else if let Some(from_obj) = from_obj {
// Standard data load
write!(f, " FROM {}{}", from_obj, stage_params)?;
write!(f, " FROM {from_obj}{stage_params}")?;
if let Some(from_obj_alias) = from_obj_alias {
write!(f, " AS {from_obj_alias}")?;
}
@ -5908,24 +6018,24 @@ impl fmt::Display for Statement {
write!(f, " FILES = ('{}')", display_separated(files, "', '"))?;
}
if let Some(pattern) = pattern {
write!(f, " PATTERN = '{}'", pattern)?;
write!(f, " PATTERN = '{pattern}'")?;
}
if let Some(partition) = partition {
write!(f, " PARTITION BY {partition}")?;
}
if !file_format.options.is_empty() {
write!(f, " FILE_FORMAT=({})", file_format)?;
write!(f, " FILE_FORMAT=({file_format})")?;
}
if !copy_options.options.is_empty() {
match kind {
CopyIntoSnowflakeKind::Table => {
write!(f, " COPY_OPTIONS=({})", copy_options)?
write!(f, " COPY_OPTIONS=({copy_options})")?
}
CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?,
}
}
if let Some(validation_mode) = validation_mode {
write!(f, " VALIDATION_MODE = {}", validation_mode)?;
write!(f, " VALIDATION_MODE = {validation_mode}")?;
}
Ok(())
}
@ -5971,10 +6081,10 @@ impl fmt::Display for Statement {
} => {
write!(f, "OPTIMIZE TABLE {name}")?;
if let Some(on_cluster) = on_cluster {
write!(f, " ON CLUSTER {on_cluster}", on_cluster = on_cluster)?;
write!(f, " ON CLUSTER {on_cluster}")?;
}
if let Some(partition) = partition {
write!(f, " {partition}", partition = partition)?;
write!(f, " {partition}")?;
}
if *include_final {
write!(f, " FINAL")?;
@ -6101,7 +6211,7 @@ impl fmt::Display for SetAssignment {
write!(
f,
"{}{} = {}",
self.scope.map(|s| format!("{}", s)).unwrap_or_default(),
self.scope.map(|s| format!("{s}")).unwrap_or_default(),
self.name,
self.value
)
@ -6830,7 +6940,7 @@ impl fmt::Display for GranteeName {
match self {
GranteeName::ObjectName(name) => name.fmt(f),
GranteeName::UserHost { user, host } => {
write!(f, "{}@{}", user, host)
write!(f, "{user}@{host}")
}
}
}
@ -6845,6 +6955,24 @@ pub enum GrantObjects {
AllSequencesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `ALL TABLES IN SCHEMA <schema_name> [, ...]`
AllTablesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `ALL VIEWS IN SCHEMA <schema_name> [, ...]`
AllViewsInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `ALL MATERIALIZED VIEWS IN SCHEMA <schema_name> [, ...]`
AllMaterializedViewsInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `ALL EXTERNAL TABLES IN SCHEMA <schema_name> [, ...]`
AllExternalTablesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `FUTURE SCHEMAS IN DATABASE <database_name> [, ...]`
FutureSchemasInDatabase { databases: Vec<ObjectName> },
/// Grant privileges on `FUTURE TABLES IN SCHEMA <schema_name> [, ...]`
FutureTablesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `FUTURE VIEWS IN SCHEMA <schema_name> [, ...]`
FutureViewsInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `FUTURE EXTERNAL TABLES IN SCHEMA <schema_name> [, ...]`
FutureExternalTablesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `FUTURE MATERIALIZED VIEWS IN SCHEMA <schema_name> [, ...]`
FutureMaterializedViewsInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on `FUTURE SEQUENCES IN SCHEMA <schema_name> [, ...]`
FutureSequencesInSchema { schemas: Vec<ObjectName> },
/// Grant privileges on specific databases
Databases(Vec<ObjectName>),
/// Grant privileges on specific schemas
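The `Future*InSchema` and `FutureSchemasInDatabase` variants model Snowflake future grants. A sketch, assuming the Snowflake dialect accepts these object forms (#1906):

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each statement should parse into Statement::Grant with a GrantObjects::Future* object list.
    for sql in [
        "GRANT SELECT ON FUTURE TABLES IN SCHEMA db1.s1 TO ROLE r1",
        "GRANT USAGE ON FUTURE SCHEMAS IN DATABASE db1 TO ROLE r1",
    ] {
        println!("{}", Parser::parse_sql(&SnowflakeDialect {}, sql).expect("parse failed")[0]);
    }
}
```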
@ -6913,6 +7041,69 @@ impl fmt::Display for GrantObjects {
display_comma_separated(schemas)
)
}
GrantObjects::AllExternalTablesInSchema { schemas } => {
write!(
f,
"ALL EXTERNAL TABLES IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::AllViewsInSchema { schemas } => {
write!(
f,
"ALL VIEWS IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::AllMaterializedViewsInSchema { schemas } => {
write!(
f,
"ALL MATERIALIZED VIEWS IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::FutureSchemasInDatabase { databases } => {
write!(
f,
"FUTURE SCHEMAS IN DATABASE {}",
display_comma_separated(databases)
)
}
GrantObjects::FutureTablesInSchema { schemas } => {
write!(
f,
"FUTURE TABLES IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::FutureExternalTablesInSchema { schemas } => {
write!(
f,
"FUTURE EXTERNAL TABLES IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::FutureViewsInSchema { schemas } => {
write!(
f,
"FUTURE VIEWS IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::FutureMaterializedViewsInSchema { schemas } => {
write!(
f,
"FUTURE MATERIALIZED VIEWS IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::FutureSequencesInSchema { schemas } => {
write!(
f,
"FUTURE SEQUENCES IN SCHEMA {}",
display_comma_separated(schemas)
)
}
GrantObjects::ResourceMonitors(objects) => {
write!(f, "RESOURCE MONITOR {}", display_comma_separated(objects))
}
@ -7000,7 +7191,7 @@ pub enum AssignmentTarget {
impl fmt::Display for AssignmentTarget {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
AssignmentTarget::ColumnName(column) => write!(f, "{}", column),
AssignmentTarget::ColumnName(column) => write!(f, "{column}"),
AssignmentTarget::Tuple(columns) => write!(f, "({})", display_comma_separated(columns)),
}
}
@ -7245,8 +7436,8 @@ impl fmt::Display for FunctionArguments {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FunctionArguments::None => Ok(()),
FunctionArguments::Subquery(query) => write!(f, "({})", query),
FunctionArguments::List(args) => write!(f, "({})", args),
FunctionArguments::Subquery(query) => write!(f, "({query})"),
FunctionArguments::List(args) => write!(f, "({args})"),
}
}
}
@ -7267,7 +7458,7 @@ pub struct FunctionArgumentList {
impl fmt::Display for FunctionArgumentList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(duplicate_treatment) = self.duplicate_treatment {
write!(f, "{} ", duplicate_treatment)?;
write!(f, "{duplicate_treatment} ")?;
}
write!(f, "{}", display_comma_separated(&self.args))?;
if !self.clauses.is_empty() {
@ -7327,7 +7518,7 @@ impl fmt::Display for FunctionArgumentClause {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment) => {
write!(f, "{}", null_treatment)
write!(f, "{null_treatment}")
}
FunctionArgumentClause::OrderBy(order_by) => {
write!(f, "ORDER BY {}", display_comma_separated(order_by))
@ -7783,12 +7974,12 @@ pub enum SqlOption {
impl fmt::Display for SqlOption {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SqlOption::Clustered(c) => write!(f, "{}", c),
SqlOption::Clustered(c) => write!(f, "{c}"),
SqlOption::Ident(ident) => {
write!(f, "{}", ident)
write!(f, "{ident}")
}
SqlOption::KeyValue { key: name, value } => {
write!(f, "{} = {}", name, value)
write!(f, "{name} = {value}")
}
SqlOption::Partition {
column_name,
@ -7828,7 +8019,7 @@ impl fmt::Display for SqlOption {
SqlOption::NamedParenthesizedList(value) => {
write!(f, "{} = ", value.key)?;
if let Some(key) = &value.name {
write!(f, "{}", key)?;
write!(f, "{key}")?;
}
if !value.values.is_empty() {
write!(f, "({})", display_comma_separated(&value.values))?
@ -7871,6 +8062,70 @@ impl fmt::Display for SecretOption {
}
}
/// A `CREATE SERVER` statement.
///
/// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createserver.html)
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct CreateServerStatement {
pub name: ObjectName,
pub if_not_exists: bool,
pub server_type: Option<Ident>,
pub version: Option<Ident>,
pub foreign_data_wrapper: ObjectName,
pub options: Option<Vec<CreateServerOption>>,
}
impl fmt::Display for CreateServerStatement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let CreateServerStatement {
name,
if_not_exists,
server_type,
version,
foreign_data_wrapper,
options,
} = self;
write!(
f,
"CREATE SERVER {if_not_exists}{name} ",
if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" },
)?;
if let Some(st) = server_type {
write!(f, "TYPE {st} ")?;
}
if let Some(v) = version {
write!(f, "VERSION {v} ")?;
}
write!(f, "FOREIGN DATA WRAPPER {foreign_data_wrapper}")?;
if let Some(o) = options {
write!(f, " OPTIONS ({o})", o = display_comma_separated(o))?;
}
Ok(())
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct CreateServerOption {
pub key: Ident,
pub value: Ident,
}
impl fmt::Display for CreateServerOption {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", self.key, self.value)
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
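A sketch of the Postgres statement `CreateServerStatement` models; the option values follow the Postgres documentation and are assumed to parse as quoted identifiers (#1914):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // CREATE SERVER with IF NOT EXISTS, a foreign-data wrapper, and an OPTIONS list.
    let sql = "CREATE SERVER IF NOT EXISTS film_server \
               FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')";
    println!("{}", Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("parse failed")[0]);
}
```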
@ -7885,7 +8140,7 @@ impl fmt::Display for AttachDuckDBDatabaseOption {
AttachDuckDBDatabaseOption::ReadOnly(Some(true)) => write!(f, "READ_ONLY true"),
AttachDuckDBDatabaseOption::ReadOnly(Some(false)) => write!(f, "READ_ONLY false"),
AttachDuckDBDatabaseOption::ReadOnly(None) => write!(f, "READ_ONLY"),
AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {}", t),
AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {t}"),
}
}
}
@ -9198,12 +9453,12 @@ impl Display for RowAccessPolicy {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Tag {
pub key: Ident,
pub key: ObjectName,
pub value: String,
}
impl Tag {
pub fn new(key: Ident, value: String) -> Self {
pub fn new(key: ObjectName, value: String) -> Self {
Self { key, value }
}
}
@ -9408,10 +9663,10 @@ impl fmt::Display for ShowStatementIn {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.clause)?;
if let Some(parent_type) = &self.parent_type {
write!(f, " {}", parent_type)?;
write!(f, " {parent_type}")?;
}
if let Some(parent_name) = &self.parent_name {
write!(f, " {}", parent_name)?;
write!(f, " {parent_name}")?;
}
Ok(())
}
@ -9492,7 +9747,7 @@ impl fmt::Display for TableObject {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::TableName(table_name) => write!(f, "{table_name}"),
Self::TableFunction(func) => write!(f, "FUNCTION {}", func),
Self::TableFunction(func) => write!(f, "FUNCTION {func}"),
}
}
}
@ -9680,7 +9935,7 @@ pub struct ReturnStatement {
impl fmt::Display for ReturnStatement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match &self.value {
Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {}", expr),
Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {expr}"),
None => write!(f, "RETURN"),
}
}
@ -9729,8 +9984,31 @@ impl fmt::Display for NullInclusion {
}
}
/// Checks membership of a value in a JSON array
///
/// Syntax:
/// ```sql
/// <value> MEMBER OF(<array>)
/// ```
/// [MySQL](https://dev.mysql.com/doc/refman/8.4/en/json-search-functions.html#operator_member-of)
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct MemberOf {
pub value: Box<Expr>,
pub array: Box<Expr>,
}
impl fmt::Display for MemberOf {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} MEMBER OF({})", self.value, self.array)
}
}
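A hedged sketch of the new node's rendering, built by hand so it depends only on the struct and `Display` impl above (the `Expr::MemberOf` variant itself is wired up in the suppressed expression diff):

```rust
use sqlparser::ast::{Expr, Ident, MemberOf};

fn main() {
    // Roughly `color MEMBER OF(allowed_colors)` in MySQL terms.
    let member_of = MemberOf {
        value: Box::new(Expr::Identifier(Ident::new("color"))),
        array: Box::new(Expr::Identifier(Ident::new("allowed_colors"))),
    };
    assert_eq!(member_of.to_string(), "color MEMBER OF(allowed_colors)");
}
```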
#[cfg(test)]
mod tests {
use crate::tokenizer::Location;
use super::*;
#[test]
@ -10026,4 +10304,16 @@ mod tests {
test_steps(OneOrManyWithParens::Many(vec![2]), vec![2], 3);
test_steps(OneOrManyWithParens::Many(vec![3, 4]), vec![3, 4], 4);
}
// Tests that the position in the code of an `Ident` does not affect its
// ordering.
#[test]
fn test_ident_ord() {
let mut a = Ident::with_span(Span::new(Location::new(1, 1), Location::new(1, 1)), "a");
let mut b = Ident::with_span(Span::new(Location::new(2, 2), Location::new(2, 2)), "b");
assert!(a < b);
std::mem::swap(&mut a.span, &mut b.span);
assert!(a < b);
}
}


@ -1047,7 +1047,7 @@ impl fmt::Display for ConnectBy {
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Setting {
pub key: Ident,
pub value: Value,
pub value: Expr,
}
impl fmt::Display for Setting {
@ -1183,7 +1183,7 @@ impl fmt::Display for TableIndexHints {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {} ", self.hint_type, self.index_type)?;
if let Some(for_clause) = &self.for_clause {
write!(f, "FOR {} ", for_clause)?;
write!(f, "FOR {for_clause} ")?;
}
write!(f, "({})", display_comma_separated(&self.index_names))
}
@ -1459,7 +1459,7 @@ impl fmt::Display for TableSampleQuantity {
}
write!(f, "{}", self.value)?;
if let Some(unit) = &self.unit {
write!(f, " {}", unit)?;
write!(f, " {unit}")?;
}
if self.parenthesized {
write!(f, ")")?;
@ -1552,7 +1552,7 @@ impl fmt::Display for TableSampleBucket {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?;
if let Some(on) = &self.on {
write!(f, " ON {}", on)?;
write!(f, " ON {on}")?;
}
Ok(())
}
@ -1561,19 +1561,19 @@ impl fmt::Display for TableSample {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.modifier)?;
if let Some(name) = &self.name {
write!(f, " {}", name)?;
write!(f, " {name}")?;
}
if let Some(quantity) = &self.quantity {
write!(f, " {}", quantity)?;
write!(f, " {quantity}")?;
}
if let Some(seed) = &self.seed {
write!(f, " {}", seed)?;
write!(f, " {seed}")?;
}
if let Some(bucket) = &self.bucket {
write!(f, " ({})", bucket)?;
write!(f, " ({bucket})")?;
}
if let Some(offset) = &self.offset {
write!(f, " OFFSET {}", offset)?;
write!(f, " OFFSET {offset}")?;
}
Ok(())
}
@ -1651,7 +1651,7 @@ impl fmt::Display for RowsPerMatch {
RowsPerMatch::AllRows(mode) => {
write!(f, "ALL ROWS PER MATCH")?;
if let Some(mode) = mode {
write!(f, " {}", mode)?;
write!(f, " {mode}")?;
}
Ok(())
}
@ -1777,7 +1777,7 @@ impl fmt::Display for MatchRecognizePattern {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use MatchRecognizePattern::*;
match self {
Symbol(symbol) => write!(f, "{}", symbol),
Symbol(symbol) => write!(f, "{symbol}"),
Exclude(symbol) => write!(f, "{{- {symbol} -}}"),
Permute(symbols) => write!(f, "PERMUTE({})", display_comma_separated(symbols)),
Concat(patterns) => write!(f, "{}", display_separated(patterns, " ")),
@ -2148,7 +2148,7 @@ impl fmt::Display for TableAliasColumnDef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.name)?;
if let Some(ref data_type) = self.data_type {
write!(f, " {}", data_type)?;
write!(f, " {data_type}")?;
}
Ok(())
}
@ -2398,7 +2398,7 @@ impl fmt::Display for OrderBy {
write!(f, " {}", display_comma_separated(exprs))?;
}
OrderByKind::All(all) => {
write!(f, " ALL{}", all)?;
write!(f, " ALL{all}")?;
}
}
@ -2429,7 +2429,7 @@ impl fmt::Display for OrderByExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{}", self.expr, self.options)?;
if let Some(ref with_fill) = self.with_fill {
write!(f, " {}", with_fill)?
write!(f, " {with_fill}")?
}
Ok(())
}
@ -2452,13 +2452,13 @@ impl fmt::Display for WithFill {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "WITH FILL")?;
if let Some(ref from) = self.from {
write!(f, " FROM {}", from)?;
write!(f, " FROM {from}")?;
}
if let Some(ref to) = self.to {
write!(f, " TO {}", to)?;
write!(f, " TO {to}")?;
}
if let Some(ref step) = self.step {
write!(f, " STEP {}", step)?;
write!(f, " STEP {step}")?;
}
Ok(())
}
@ -2487,7 +2487,7 @@ impl fmt::Display for InterpolateExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.column)?;
if let Some(ref expr) = self.expr {
write!(f, " AS {}", expr)?;
write!(f, " AS {expr}")?;
}
Ok(())
}
@ -2565,7 +2565,7 @@ impl fmt::Display for LimitClause {
Ok(())
}
LimitClause::OffsetCommaLimit { offset, limit } => {
write!(f, " LIMIT {}, {}", offset, limit)
write!(f, " LIMIT {offset}, {limit}")
}
}
}
@ -2684,6 +2684,79 @@ pub enum PipeOperator {
/// Syntax: `|> TABLESAMPLE SYSTEM (10 PERCENT)`
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#tablesample_pipe_operator>
TableSample { sample: Box<TableSample> },
/// Renames columns in the input table.
///
/// Syntax: `|> RENAME old_name AS new_name, ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#rename_pipe_operator>
Rename { mappings: Vec<IdentWithAlias> },
/// Combines the input table with one or more tables using UNION.
///
/// Syntax: `|> UNION [ALL|DISTINCT] (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#union_pipe_operator>
Union {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Returns only the rows that are present in both the input table and the specified tables.
///
/// Syntax: `|> INTERSECT [DISTINCT] (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#intersect_pipe_operator>
Intersect {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Returns only the rows that are present in the input table but not in the specified tables.
///
/// Syntax: `|> EXCEPT DISTINCT (<query>), (<query>), ...`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#except_pipe_operator>
Except {
set_quantifier: SetQuantifier,
queries: Vec<Query>,
},
/// Calls a table function or procedure that returns a table.
///
/// Syntax: `|> CALL function_name(args) [AS alias]`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#call_pipe_operator>
Call {
function: Function,
alias: Option<Ident>,
},
/// Pivots data from rows to columns.
///
/// Syntax: `|> PIVOT(aggregate_function(column) FOR pivot_column IN (value1, value2, ...)) [AS alias]`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#pivot_pipe_operator>
Pivot {
aggregate_functions: Vec<ExprWithAlias>,
value_column: Vec<Ident>,
value_source: PivotValueSource,
alias: Option<Ident>,
},
/// The `UNPIVOT` pipe operator transforms columns into rows.
///
/// Syntax:
/// ```sql
/// |> UNPIVOT(value_column FOR name_column IN (column1, column2, ...)) [alias]
/// ```
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#unpivot_pipe_operator>
Unpivot {
value_column: Ident,
name_column: Ident,
unpivot_columns: Vec<Ident>,
alias: Option<Ident>,
},
/// Joins the input table with another table.
///
/// Syntax: `|> [JOIN_TYPE] JOIN <table> [alias] ON <condition>` or `|> [JOIN_TYPE] JOIN <table> [alias] USING (<columns>)`
///
/// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#join_pipe_operator>
Join(Join),
}
impl fmt::Display for PipeOperator {
@ -2702,12 +2775,12 @@ impl fmt::Display for PipeOperator {
write!(f, "DROP {}", display_comma_separated(columns.as_slice()))
}
PipeOperator::As { alias } => {
write!(f, "AS {}", alias)
write!(f, "AS {alias}")
}
PipeOperator::Limit { expr, offset } => {
write!(f, "LIMIT {}", expr)?;
write!(f, "LIMIT {expr}")?;
if let Some(offset) = offset {
write!(f, " OFFSET {}", offset)?;
write!(f, " OFFSET {offset}")?;
}
Ok(())
}
@ -2730,16 +2803,96 @@ impl fmt::Display for PipeOperator {
}
PipeOperator::Where { expr } => {
write!(f, "WHERE {}", expr)
write!(f, "WHERE {expr}")
}
PipeOperator::OrderBy { exprs } => {
write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice()))
}
PipeOperator::TableSample { sample } => {
write!(f, "{}", sample)
write!(f, "{sample}")
}
PipeOperator::Rename { mappings } => {
write!(f, "RENAME {}", display_comma_separated(mappings))
}
PipeOperator::Union {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "UNION", set_quantifier, queries),
PipeOperator::Intersect {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "INTERSECT", set_quantifier, queries),
PipeOperator::Except {
set_quantifier,
queries,
} => Self::fmt_set_operation(f, "EXCEPT", set_quantifier, queries),
PipeOperator::Call { function, alias } => {
write!(f, "CALL {function}")?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Pivot {
aggregate_functions,
value_column,
value_source,
alias,
} => {
write!(
f,
"PIVOT({} FOR {} IN ({}))",
display_comma_separated(aggregate_functions),
Expr::CompoundIdentifier(value_column.to_vec()),
value_source
)?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Unpivot {
value_column,
name_column,
unpivot_columns,
alias,
} => {
write!(
f,
"UNPIVOT({} FOR {} IN ({}))",
value_column,
name_column,
display_comma_separated(unpivot_columns)
)?;
Self::fmt_optional_alias(f, alias)
}
PipeOperator::Join(join) => write!(f, "{join}"),
}
}
}
impl PipeOperator {
/// Helper function to format optional alias for pipe operators
fn fmt_optional_alias(f: &mut fmt::Formatter<'_>, alias: &Option<Ident>) -> fmt::Result {
if let Some(alias) = alias {
write!(f, " AS {alias}")?;
}
Ok(())
}
/// Helper function to format set operations (UNION, INTERSECT, EXCEPT) with queries
fn fmt_set_operation(
f: &mut fmt::Formatter<'_>,
operation: &str,
set_quantifier: &SetQuantifier,
queries: &[Query],
) -> fmt::Result {
write!(f, "{operation}")?;
match set_quantifier {
SetQuantifier::None => {}
_ => {
write!(f, " {set_quantifier}")?;
}
}
write!(f, " ")?;
let parenthesized_queries: Vec<String> =
queries.iter().map(|query| format!("({query})")).collect();
write!(f, "{}", display_comma_separated(&parenthesized_queries))
}
}
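To make the formatting concrete, a hedged sketch of one of the simpler new variants; it assumes `IdentWithAlias` keeps its `ident`/`alias` fields and its `ident AS alias` rendering:

```rust
use sqlparser::ast::{Ident, IdentWithAlias, PipeOperator};

fn main() {
    let op = PipeOperator::Rename {
        mappings: vec![IdentWithAlias {
            ident: Ident::new("customer_id"),
            alias: Ident::new("cid"),
        }],
    };
    // The leading `|> ` is emitted by the surrounding query formatter.
    assert_eq!(op.to_string(), "RENAME customer_id AS cid");
}
```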
@ -3016,7 +3169,7 @@ pub enum FormatClause {
impl fmt::Display for FormatClause {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
FormatClause::Identifier(ident) => write!(f, "FORMAT {}", ident),
FormatClause::Identifier(ident) => write!(f, "FORMAT {ident}"),
FormatClause::Null => write!(f, "FORMAT NULL"),
}
}
@ -3078,9 +3231,9 @@ impl fmt::Display for ForClause {
without_array_wrapper,
} => {
write!(f, "FOR JSON ")?;
write!(f, "{}", for_json)?;
write!(f, "{for_json}")?;
if let Some(root) = root {
write!(f, ", ROOT('{}')", root)?;
write!(f, ", ROOT('{root}')")?;
}
if *include_null_values {
write!(f, ", INCLUDE_NULL_VALUES")?;
@ -3098,7 +3251,7 @@ impl fmt::Display for ForClause {
r#type,
} => {
write!(f, "FOR XML ")?;
write!(f, "{}", for_xml)?;
write!(f, "{for_xml}")?;
if *binary_base64 {
write!(f, ", BINARY BASE64")?;
}
@ -3106,7 +3259,7 @@ impl fmt::Display for ForClause {
write!(f, ", TYPE")?;
}
if let Some(root) = root {
write!(f, ", ROOT('{}')", root)?;
write!(f, ", ROOT('{root}')")?;
}
if *elements {
write!(f, ", ELEMENTS")?;
@ -3133,7 +3286,7 @@ impl fmt::Display for ForXml {
ForXml::Raw(root) => {
write!(f, "RAW")?;
if let Some(root) = root {
write!(f, "('{}')", root)?;
write!(f, "('{root}')")?;
}
Ok(())
}
@ -3142,7 +3295,7 @@ impl fmt::Display for ForXml {
ForXml::Path(root) => {
write!(f, "PATH")?;
if let Some(root) = root {
write!(f, "('{}')", root)?;
write!(f, "('{root}')")?;
}
Ok(())
}
@ -3205,7 +3358,7 @@ impl fmt::Display for JsonTableColumn {
JsonTableColumn::Named(json_table_named_column) => {
write!(f, "{json_table_named_column}")
}
JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident),
JsonTableColumn::ForOrdinality(ident) => write!(f, "{ident} FOR ORDINALITY"),
JsonTableColumn::Nested(json_table_nested_column) => {
write!(f, "{json_table_nested_column}")
}
@ -3271,10 +3424,10 @@ impl fmt::Display for JsonTableNamedColumn {
self.path
)?;
if let Some(on_empty) = &self.on_empty {
write!(f, " {} ON EMPTY", on_empty)?;
write!(f, " {on_empty} ON EMPTY")?;
}
if let Some(on_error) = &self.on_error {
write!(f, " {} ON ERROR", on_error)?;
write!(f, " {on_error} ON ERROR")?;
}
Ok(())
}
@ -3296,7 +3449,7 @@ impl fmt::Display for JsonTableColumnErrorHandling {
match self {
JsonTableColumnErrorHandling::Null => write!(f, "NULL"),
JsonTableColumnErrorHandling::Default(json_string) => {
write!(f, "DEFAULT {}", json_string)
write!(f, "DEFAULT {json_string}")
}
JsonTableColumnErrorHandling::Error => write!(f, "ERROR"),
}
@ -3429,12 +3582,12 @@ impl fmt::Display for XmlTableColumn {
default,
nullable,
} => {
write!(f, " {}", r#type)?;
write!(f, " {type}")?;
if let Some(p) = path {
write!(f, " PATH {}", p)?;
write!(f, " PATH {p}")?;
}
if let Some(d) = default {
write!(f, " DEFAULT {}", d)?;
write!(f, " DEFAULT {d}")?;
}
if !*nullable {
write!(f, " NOT NULL")?;
@ -3465,7 +3618,7 @@ impl fmt::Display for XmlPassingArgument {
}
write!(f, "{}", self.expr)?;
if let Some(alias) = &self.alias {
write!(f, " AS {}", alias)?;
write!(f, " AS {alias}")?;
}
Ok(())
}


@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
use crate::ast::query::SelectItemQualifiedWildcardKind;
use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions};
use core::iter;
use crate::tokenizer::Span;
@ -28,16 +28,17 @@ use super::{
ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate,
InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView,
LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, NamedWindowDefinition,
ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OpenStatement,
OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, Query,
RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, ReplaceSelectElement,
ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript,
SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject,
TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef,
WhileStatement, WildcardAdditionalOptions, With, WithFill,
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert,
Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem,
LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList,
NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction,
OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource,
ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction,
RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem,
SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef,
TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins,
UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement,
WildcardAdditionalOptions, With, WithFill,
};
/// Given an iterator of spans, return the [Span::union] of all spans.
@ -422,6 +423,7 @@ impl Spanned for Statement {
Statement::CreateIndex(create_index) => create_index.span(),
Statement::CreateRole { .. } => Span::empty(),
Statement::CreateSecret { .. } => Span::empty(),
Statement::CreateServer { .. } => Span::empty(),
Statement::CreateConnector { .. } => Span::empty(),
Statement::AlterTable {
name,
@ -650,7 +652,7 @@ impl Spanned for TableConstraint {
name.iter()
.map(|i| i.span)
.chain(index_name.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span()))
.chain(characteristics.iter().map(|i| i.span())),
),
TableConstraint::PrimaryKey {
@ -664,7 +666,7 @@ impl Spanned for TableConstraint {
name.iter()
.map(|i| i.span)
.chain(index_name.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span()))
.chain(characteristics.iter().map(|i| i.span())),
),
TableConstraint::ForeignKey {
@ -700,7 +702,7 @@ impl Spanned for TableConstraint {
} => union_spans(
name.iter()
.map(|i| i.span)
.chain(columns.iter().map(|i| i.span)),
.chain(columns.iter().map(|i| i.span())),
),
TableConstraint::FulltextOrSpatial {
fulltext: _,
@ -711,7 +713,7 @@ impl Spanned for TableConstraint {
opt_index_name
.iter()
.map(|i| i.span)
.chain(columns.iter().map(|i| i.span)),
.chain(columns.iter().map(|i| i.span())),
),
}
}
@ -745,6 +747,12 @@ impl Spanned for CreateIndex {
}
}
impl Spanned for IndexColumn {
fn span(&self) -> Span {
self.column.span()
}
}
impl Spanned for CaseStatement {
fn span(&self) -> Span {
let CaseStatement {
@ -917,6 +925,7 @@ impl Spanned for AlterColumnOperation {
AlterColumnOperation::SetDataType {
data_type: _,
using,
had_set: _,
} => using.as_ref().map_or(Span::empty(), |u| u.span()),
AlterColumnOperation::AddGenerated { .. } => Span::empty(),
}
@ -984,10 +993,13 @@ impl Spanned for ViewColumnDef {
options,
} = self;
union_spans(
core::iter::once(name.span)
.chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))),
)
name.span.union_opt(&options.as_ref().map(|o| o.span()))
}
}
impl Spanned for ColumnOptions {
fn span(&self) -> Span {
union_spans(self.as_slice().iter().map(|i| i.span()))
}
}
@ -1048,7 +1060,9 @@ impl Spanned for CreateTableOptions {
match self {
CreateTableOptions::None => Span::empty(),
CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::Options(vec) => {
union_spans(vec.as_slice().iter().map(|i| i.span()))
}
CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
}
@ -1062,7 +1076,10 @@ impl Spanned for CreateTableOptions {
impl Spanned for AlterTableOperation {
fn span(&self) -> Span {
match self {
AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(),
AlterTableOperation::AddConstraint {
constraint,
not_valid: _,
} => constraint.span(),
AlterTableOperation::AddColumn {
column_keyword: _,
if_not_exists: _,
@ -1095,10 +1112,10 @@ impl Spanned for AlterTableOperation {
} => name.span,
AlterTableOperation::DropColumn {
has_column_keyword: _,
column_name,
column_names,
if_exists: _,
drop_behavior: _,
} => column_name.span,
} => union_spans(column_names.iter().map(|i| i.span)),
AlterTableOperation::AttachPartition { partition } => partition.span(),
AlterTableOperation::DetachPartition { partition } => partition.span(),
AlterTableOperation::FreezePartition {
@ -1183,6 +1200,7 @@ impl Spanned for AlterTableOperation {
AlterTableOperation::AutoIncrement { value, .. } => value.span(),
AlterTableOperation::Lock { .. } => Span::empty(),
AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
AlterTableOperation::ValidateConstraint { name } => name.span,
}
}
}
@ -1398,7 +1416,6 @@ impl Spanned for AssignmentTarget {
/// e.g. `IS NULL <expr>` reports as `<expr>::span`.
///
/// Missing spans:
/// - [Expr::TypedString] # missing span for data_type
/// - [Expr::MatchAgainst] # MySQL specific
/// - [Expr::RLike] # MySQL specific
/// - [Expr::Struct] # BigQuery specific
@ -1607,6 +1624,7 @@ impl Spanned for Expr {
Expr::OuterJoin(expr) => expr.span(),
Expr::Prior(expr) => expr.span(),
Expr::Lambda(_) => Span::empty(),
Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()),
}
}
}


@ -116,7 +116,6 @@ impl From<ValueWithSpan> for Value {
derive(Visit, VisitMut),
visit(with = "visit_value")
)]
pub enum Value {
/// Numeric literal
#[cfg(not(feature = "bigdecimal"))]
@ -551,16 +550,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
write!(f, r#"\\"#)?;
}
x if x.is_ascii() => {
write!(f, "{}", c)?;
write!(f, "{c}")?;
}
_ => {
let codepoint = c as u32;
// if the character fits in 32 bits, we can use the \XXXX format
// otherwise, we need to use the \+XXXXXX format
if codepoint <= 0xFFFF {
write!(f, "\\{:04X}", codepoint)?;
write!(f, "\\{codepoint:04X}")?;
} else {
write!(f, "\\+{:06X}", codepoint)?;
write!(f, "\\+{codepoint:06X}")?;
}
}
}


@ -926,10 +926,10 @@ mod tests {
#[test]
fn overflow() {
let cond = (0..1000)
.map(|n| format!("X = {}", n))
.map(|n| format!("X = {n}"))
.collect::<Vec<_>>()
.join(" OR ");
let sql = format!("SELECT x where {0}", cond);
let sql = format!("SELECT x where {cond}");
let dialect = GenericDialect {};
let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap();


@ -46,7 +46,11 @@ pub struct BigQueryDialect;
impl Dialect for BigQueryDialect {
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
self.maybe_parse_statement(parser)
if parser.parse_keyword(Keyword::BEGIN) {
return Some(parser.parse_begin_exception_end());
}
None
}
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
@ -141,48 +145,3 @@ impl Dialect for BigQueryDialect {
true
}
}
impl BigQueryDialect {
fn maybe_parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
if parser.peek_keyword(Keyword::BEGIN) {
return Some(self.parse_begin(parser));
}
None
}
/// Parse a `BEGIN` statement.
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
fn parse_begin(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
parser.expect_keyword(Keyword::BEGIN)?;
let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
let has_exception_when_clause = parser.parse_keywords(&[
Keyword::EXCEPTION,
Keyword::WHEN,
Keyword::ERROR,
Keyword::THEN,
]);
let exception_statements = if has_exception_when_clause {
if !parser.peek_keyword(Keyword::END) {
Some(parser.parse_statement_list(&[Keyword::END])?)
} else {
Some(Default::default())
}
} else {
None
};
parser.expect_keyword(Keyword::END)?;
Ok(Statement::StartTransaction {
begin: true,
statements,
exception_statements,
has_end_keyword: true,
transaction: None,
modifier: None,
modes: Default::default(),
})
}
}
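The BigQuery-local `BEGIN` parser above is removed in favour of the shared `parse_begin_exception_end` entry point; a hedged smoke test mirroring the updated `parse_begin` test later in this diff:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // The same shape the removed BigQuery-specific code path used to handle.
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).expect("should still parse");
    assert_eq!(stmts.len(), 1);
}
```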


@ -52,6 +52,10 @@ impl Dialect for GenericDialect {
true
}
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
fn supports_connect_by(&self) -> bool {
true
}
@ -108,6 +112,14 @@ impl Dialect for GenericDialect {
true
}
fn supports_from_first_select(&self) -> bool {
true
}
fn supports_projection_trailing_commas(&self) -> bool {
true
}
fn supports_asc_desc_in_column_definition(&self) -> bool {
true
}


@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
false
}
/// Indicates whether the dialect supports left-associative join parsing
/// by default when parentheses are omitted in nested joins.
///
/// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
/// so a query like:
///
/// ```sql
/// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
/// ```
/// is interpreted as:
/// ```sql
/// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
/// ```
/// and internally represented as a **flat list** of joins.
///
/// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
/// precedence and interpret the same query as:
/// ```sql
/// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
/// ```
/// which results in a **nested join** structure in the AST.
///
/// If this method returns `false`, the parser must build nested join trees
/// even in the absence of parentheses to reflect the correct associativity.
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
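A hedged sketch of a custom dialect opting into the Snowflake-style behaviour through the new hook; only the two identifier methods are assumed to be the trait's required ones, everything else keeps its default:

```rust
use sqlparser::dialect::Dialect;

#[derive(Debug)]
struct RightAssocJoins;

impl Dialect for RightAssocJoins {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }
    // Nest `t1 NATURAL JOIN t5 INNER JOIN t0 ON ...` as
    // `t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...)`.
    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }
}
```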
/// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
fn supports_outer_join_operator(&self) -> bool {
false
@ -587,7 +615,7 @@ pub trait Dialect: Debug + Any {
}
let token = parser.peek_token();
debug!("get_next_precedence_full() {:?}", token);
debug!("get_next_precedence_full() {token:?}");
match token.token {
Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)),
Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)),
@ -621,6 +649,7 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
_ => Ok(self.prec_unknown()),
},
Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)),
@ -633,6 +662,7 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)),
Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)),
Token::Period => Ok(p!(Period)),
@ -1028,6 +1058,24 @@ pub trait Dialect: Debug + Any {
fn supports_set_names(&self) -> bool {
false
}
fn supports_space_separated_column_options(&self) -> bool {
false
}
/// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement.
/// Example:
/// ```sql
/// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE <type> USING <exp>
/// ```
fn supports_alter_column_type_using(&self) -> bool {
false
}
/// Returns true if the dialect supports `ALTER TABLE tbl DROP COLUMN c1, ..., cn`
fn supports_comma_separated_drop_column_list(&self) -> bool {
false
}
}
/// This represents the operators for which precedence must be defined


@ -104,7 +104,7 @@ impl Dialect for PostgreSqlDialect {
fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
let token = parser.peek_token();
debug!("get_next_precedence() {:?}", token);
debug!("get_next_precedence() {token:?}");
// we only return some custom value here when the behaviour (not merely the numeric value) differs
// from the default implementation
@ -258,4 +258,8 @@ impl Dialect for PostgreSqlDialect {
fn supports_set_names(&self) -> bool {
true
}
fn supports_alter_column_type_using(&self) -> bool {
true
}
}
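With the flag enabled for Postgres above, the `USING` form should go through the parser; a hedged smoke test (the exact AST shape lives in the suppressed parser diff):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER TABLE events ALTER COLUMN payload SET DATA TYPE JSONB USING payload::JSONB";
    assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_ok());
}
```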


@ -80,13 +80,15 @@ impl Dialect for RedshiftSqlDialect {
}
fn is_identifier_start(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii()
}
fn is_identifier_part(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii()
}
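A hedged sketch of what the relaxed rules admit; the `#`-prefixed name was already accepted, while the non-ASCII table name relies on the new `!ch.is_ascii()` arms:

```rust
use sqlparser::dialect::RedshiftSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM #temp_result, テーブル";
    assert!(Parser::parse_sql(&RedshiftSqlDialect {}, sql).is_ok());
}
```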
/// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)`


@ -131,6 +131,10 @@ impl Dialect for SnowflakeDialect {
}
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
if parser.parse_keyword(Keyword::BEGIN) {
return Some(parser.parse_begin_exception_end());
}
if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
// ALTER SESSION
let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
@ -279,6 +283,10 @@ impl Dialect for SnowflakeDialect {
true
}
fn supports_left_associative_joins_without_parens(&self) -> bool {
false
}
fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
// Unreserve some keywords that Snowflake accepts as identifiers
// See: https://docs.snowflake.com/en/sql-reference/reserved-keywords
@ -293,9 +301,8 @@ impl Dialect for SnowflakeDialect {
true
}
fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
explicit
|| match kw {
fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
match kw {
// The following keywords can be considered an alias as long as
// they are not followed by other tokens that may change their meaning
// e.g. `SELECT * EXCEPT (col1) FROM tbl`
@ -352,6 +359,14 @@ impl Dialect for SnowflakeDialect {
fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
&RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
}
fn supports_space_separated_column_options(&self) -> bool {
true
}
fn supports_comma_separated_drop_column_list(&self) -> bool {
true
}
}
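A hedged smoke test for the comma-separated drop-column toggle enabled above:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER TABLE t DROP COLUMN c1, c2, c3";
    assert!(Parser::parse_sql(&SnowflakeDialect {}, sql).is_ok());
}
```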
fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
@ -1177,7 +1192,7 @@ fn parse_column_policy_property(
parser: &mut Parser,
with: bool,
) -> Result<ColumnPolicyProperty, ParserError> {
let policy_name = parser.parse_identifier()?;
let policy_name = parser.parse_object_name(false)?;
let using_columns = if parser.parse_keyword(Keyword::USING) {
parser.expect_token(&Token::LParen)?;
let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;


@ -395,6 +395,7 @@ define_keywords!(
FUNCTION,
FUNCTIONS,
FUSION,
FUTURE,
GENERAL,
GENERATE,
GENERATED,
@ -646,6 +647,7 @@ define_keywords!(
ORDER,
ORDINALITY,
ORGANIZATION,
OTHER,
OUT,
OUTER,
OUTPUT,
@ -814,6 +816,7 @@ define_keywords!(
SERDE,
SERDEPROPERTIES,
SERIALIZABLE,
SERVER,
SERVICE,
SESSION,
SESSION_USER,
@ -934,6 +937,8 @@ define_keywords!(
TRY,
TRY_CAST,
TRY_CONVERT,
TSQUERY,
TSVECTOR,
TUPLE,
TYPE,
UBIGINT,
@ -977,6 +982,7 @@ define_keywords!(
UUID,
VACUUM,
VALID,
VALIDATE,
VALIDATION_MODE,
VALUE,
VALUES,
@ -1012,6 +1018,7 @@ define_keywords!(
WITHOUT,
WITHOUT_ARRAY_WRAPPER,
WORK,
WRAPPER,
WRITE,
XML,
XMLNAMESPACES,

File diff suppressed because it is too large


@ -270,7 +270,7 @@ impl TestedDialects {
tokenizer = tokenizer.with_unescape(options.unescape);
}
let tokens = tokenizer.tokenize().unwrap();
assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect);
assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
});
}
}
@ -366,6 +366,11 @@ pub fn number(n: &str) -> Value {
Value::Number(n.parse().unwrap(), false)
}
/// Creates a [Value::SingleQuotedString]
pub fn single_quoted_string(s: impl Into<String>) -> Value {
Value::SingleQuotedString(s.into())
}
pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
Some(TableAlias {
name: Ident::new(name),
@ -448,3 +453,52 @@ pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
within_group: vec![],
})
}
/// Gets the first index column (MySQL calls it a key part) of the first index found in a
/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
pub fn index_column(stmt: Statement) -> Expr {
match stmt {
Statement::CreateIndex(CreateIndex { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
Statement::CreateTable(CreateTable { constraints, .. }) => {
match constraints.first().unwrap() {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
AlterTableOperation::AddConstraint { constraint, .. } => {
match constraint {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial {
columns,
..
} => columns.first().unwrap().column.expr.clone(),
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
_ => panic!("Expected a constraint"),
},
_ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
}
}
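A hedged usage sketch for the helper above, written as a hypothetical test (the test name and the use of `GenericDialect` are illustrative, not part of the change):

```rust
#[test]
fn index_column_returns_first_key_part() {
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let stmt = Parser::parse_sql(&GenericDialect {}, "CREATE INDEX idx ON t(LOWER(name))")
        .unwrap()
        .remove(0);
    // The first (and only) key part of the first index is the expression LOWER(name).
    assert_eq!(index_column(stmt).to_string(), "LOWER(name)");
}
```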


@ -1751,7 +1751,7 @@ impl<'a> Tokenizer<'a> {
(None, Some(tok)) => Ok(Some(tok)),
(None, None) => self.tokenizer_error(
chars.location(),
format!("Expected a valid binary operator after '{}'", prefix),
format!("Expected a valid binary operator after '{prefix}'"),
),
}
}
@ -1809,7 +1809,7 @@ impl<'a> Tokenizer<'a> {
chars.next();
let mut temp = String::new();
let end_delimiter = format!("${}$", value);
let end_delimiter = format!("${value}$");
loop {
match chars.next() {
@ -2402,13 +2402,13 @@ fn take_char_from_hex_digits(
location: chars.location(),
})?;
let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError {
message: format!("Invalid hex digit in escaped unicode string: {}", next_char),
message: format!("Invalid hex digit in escaped unicode string: {next_char}"),
location: chars.location(),
})?;
result = result * 16 + digit;
}
char::from_u32(result).ok_or_else(|| TokenizerError {
message: format!("Invalid unicode character: {:x}", result),
message: format!("Invalid unicode character: {result:x}"),
location: chars.location(),
})
}
@ -3504,7 +3504,7 @@ mod tests {
}
fn check_unescape(s: &str, expected: Option<&str>) {
let s = format!("'{}'", s);
let s = format!("'{s}'");
let mut state = State {
peekable: s.chars().peekable(),
line: 0,


@ -261,10 +261,10 @@ fn parse_at_at_identifier() {
#[test]
fn parse_begin() {
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END"#;
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; RAISE USING MESSAGE = FORMAT('ERR: %s', 'Bad'); END"#;
let Statement::StartTransaction {
statements,
exception_statements,
exception,
has_end_keyword,
..
} = bigquery().verified_stmt(sql)
@ -272,7 +272,10 @@ fn parse_begin() {
unreachable!();
};
assert_eq!(1, statements.len());
assert_eq!(1, exception_statements.unwrap().len());
assert!(exception.is_some());
let exception = exception.unwrap();
assert_eq!(1, exception.len());
assert!(has_end_keyword);
bigquery().verified_stmt(
@ -352,14 +355,16 @@ fn parse_create_view_with_options() {
ViewColumnDef {
name: Ident::new("age"),
data_type: None,
options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue {
options: Some(ColumnOptions::CommaSeparated(vec![ColumnOption::Options(
vec![SqlOption::KeyValue {
key: Ident::new("description"),
value: Expr::Value(
Value::DoubleQuotedString("field age".to_string()).with_span(
Span::new(Location::new(1, 42), Location::new(1, 52))
)
),
}])]),
}]
)])),
},
],
columns
@ -601,11 +606,13 @@ fn parse_nested_data_types() {
field_name: Some("a".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(
Box::new(DataType::Int64,)
))
)),
options: None,
},
StructField {
field_name: Some("b".into()),
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
},
],
StructBracketKind::AngleBrackets
@ -619,6 +626,7 @@ fn parse_nested_data_types() {
vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}],
StructBracketKind::AngleBrackets
),
@ -632,35 +640,6 @@ fn parse_nested_data_types() {
}
}
#[test]
fn parse_invalid_brackets() {
let sql = "SELECT STRUCT<INT64>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
);
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("Expected: (, found: >".to_string())
);
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_string()
)
);
}
#[test]
fn parse_tuple_struct_literal() {
// tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax
@ -771,6 +750,7 @@ fn parse_typed_struct_syntax_bigquery() {
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -799,7 +779,8 @@ fn parse_typed_struct_syntax_bigquery() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident {
@ -807,7 +788,8 @@ fn parse_typed_struct_syntax_bigquery() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
},
]
},
@ -825,17 +807,20 @@ fn parse_typed_struct_syntax_bigquery() {
field_name: Some("arr".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Float64
)))
))),
options: None,
},
StructField {
field_name: Some("str".into()),
field_type: DataType::Struct(
vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}],
StructBracketKind::AngleBrackets
)
),
options: None,
},
]
},
@ -858,13 +843,15 @@ fn parse_typed_struct_syntax_bigquery() {
field_type: DataType::Struct(
Default::default(),
StructBracketKind::AngleBrackets
)
),
options: None,
},
StructField {
field_name: Some("y".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
)))
))),
options: None,
},
]
},
@ -879,7 +866,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -891,7 +879,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -907,7 +896,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Date
field_type: DataType::Date,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -916,11 +906,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Datetime(None)
field_type: DataType::Datetime(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -930,7 +924,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::value(number("5.0"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Float64
field_type: DataType::Float64,
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -940,7 +935,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::value(number("1"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[3])
@ -962,7 +958,8 @@ fn parse_typed_struct_syntax_bigquery() {
})],
fields: vec![StructField {
field_name: None,
field_type: DataType::Interval
field_type: DataType::Interval,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -971,13 +968,17 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::JSON
field_type: DataType::JSON,
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -993,7 +994,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::String(Some(42))
field_type: DataType::String(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1002,11 +1004,17 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Timestamp(None, TimezoneInfo::None)
field_type: DataType::Timestamp(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1016,11 +1024,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Time(None, TimezoneInfo::None)
field_type: DataType::Time(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1033,11 +1045,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Numeric(ExactNumberInfo::None)
field_type: DataType::Numeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1046,11 +1062,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::BigNumeric(ExactNumberInfo::None)
field_type: DataType::BigNumeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1067,10 +1087,12 @@ fn parse_typed_struct_syntax_bigquery() {
StructField {
field_name: Some("key".into()),
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some("value".into()),
field_type: DataType::Int64,
options: None,
},
]
},
@ -1092,6 +1114,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1120,7 +1143,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident {
@ -1128,7 +1152,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
},
]
},
@ -1151,13 +1176,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
field_type: DataType::Struct(
Default::default(),
StructBracketKind::AngleBrackets
)
),
options: None,
},
StructField {
field_name: Some("y".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
)))
))),
options: None,
},
]
},
@ -1172,7 +1199,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1184,7 +1212,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1200,7 +1229,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Date
field_type: DataType::Date,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1209,11 +1239,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Datetime(None)
field_type: DataType::Datetime(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1223,7 +1257,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::value(number("5.0"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Float64
field_type: DataType::Float64,
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1233,7 +1268,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::value(number("1"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[3])
@ -1255,7 +1291,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
})],
fields: vec![StructField {
field_name: None,
field_type: DataType::Interval
field_type: DataType::Interval,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1264,13 +1301,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::JSON
field_type: DataType::JSON,
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1286,7 +1327,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::String(Some(42))
field_type: DataType::String(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1295,11 +1337,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Timestamp(None, TimezoneInfo::None)
field_type: DataType::Timestamp(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1309,11 +1357,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Time(None, TimezoneInfo::None)
field_type: DataType::Time(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1326,11 +1378,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Numeric(ExactNumberInfo::None)
field_type: DataType::Numeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1339,11 +1395,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::BigNumeric(ExactNumberInfo::None)
field_type: DataType::BigNumeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1360,7 +1420,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
values: vec![Expr::value(number("5"))],
fields: vec![StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1372,7 +1433,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
)],
fields: vec![StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1387,11 +1449,13 @@ fn parse_typed_struct_with_field_name_bigquery() {
fields: vec![
StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}
]
},
@ -1409,7 +1473,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
values: vec![Expr::value(number("5"))],
fields: vec![StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1421,7 +1486,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1436,11 +1502,13 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
fields: vec![
StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}
]
},
@ -2365,7 +2433,10 @@ fn test_triple_quote_typed_strings() {
assert_eq!(
Expr::TypedString {
data_type: DataType::JSON,
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into())
value: ValueWithSpan {
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()),
span: Span::empty(),
}
},
expr
);
@ -2407,3 +2478,91 @@ fn test_any_type() {
fn test_any_type_dont_break_custom_type() {
bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)");
}
#[test]
fn test_struct_field_options() {
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRUCT<a STRING, b INT64>, ",
"f1 STRUCT<",
"a STRING OPTIONS(description = 'This is a string', type = 'string'), ",
"b INT64",
"> OPTIONS(description = 'This is a struct field')",
")",
));
}
#[test]
fn test_struct_trailing_and_nested_bracket() {
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRING, ",
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRING>>, ",
"f2 STRING",
")",
));
// More complex nested structs
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRING, ",
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRUCT<e STRING>>>, ",
"f2 STRUCT<h STRING, i STRUCT<j INT64, k STRUCT<l STRUCT<m STRING>>>>, ",
"f3 STRUCT<e STRING, f STRUCT<c INT64>>",
")",
));
// Bad case with missing closing bracket
assert_eq!(
ParserError::ParserError("Expected: >, found: )".to_owned()),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64)")
.unwrap_err()
);
// Bad case with redundant closing bracket
assert_eq!(
ParserError::ParserError(
"unmatched > after parsing data type STRUCT<a STRING, b INT64>)".to_owned()
),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64>>)")
.unwrap_err()
);
// Bad case with redundant closing bracket in nested struct
assert_eq!(
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_owned()
),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRUCT<b INT>>>, c INT64)")
.unwrap_err()
);
let sql = "SELECT STRUCT<INT64>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
);
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("Expected: (, found: >".to_string())
);
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_string()
)
);
}


@ -28,7 +28,7 @@ use test_utils::*;
use sqlparser::ast::Expr::{BinaryOp, Identifier};
use sqlparser::ast::SelectItem::UnnamedExpr;
use sqlparser::ast::TableFactor::Table;
use sqlparser::ast::Value::Number;
use sqlparser::ast::Value::Boolean;
use sqlparser::ast::*;
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::dialect::GenericDialect;
@ -224,6 +224,10 @@ fn parse_create_table() {
clickhouse().verified_stmt(
r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#,
);
clickhouse().one_statement_parses_to(
"CREATE TABLE x (a int) ENGINE = MergeTree() ORDER BY a",
"CREATE TABLE x (a INT) ENGINE = MergeTree ORDER BY a",
);
}
#[test]
@ -669,11 +673,13 @@ fn parse_create_table_with_nested_data_types() {
DataType::Tuple(vec![
StructField {
field_name: None,
field_type: DataType::FixedString(128)
field_type: DataType::FixedString(128),
options: None,
},
StructField {
field_name: None,
field_type: DataType::Int128
field_type: DataType::Int128,
options: None,
}
])
))),
@ -685,12 +691,14 @@ fn parse_create_table_with_nested_data_types() {
StructField {
field_name: Some("a".into()),
field_type: DataType::Datetime64(9, None),
options: None,
},
StructField {
field_name: Some("b".into()),
field_type: DataType::Array(ArrayElemTypeDef::Parenthesis(
Box::new(DataType::Uuid)
))
)),
options: None,
},
]),
options: vec![],
@ -910,7 +918,7 @@ fn parse_create_view_with_fields_data_types() {
}]),
vec![]
)),
options: None
options: None,
},
ViewColumnDef {
name: "f".into(),
@ -922,7 +930,7 @@ fn parse_create_view_with_fields_data_types() {
}]),
vec![]
)),
options: None
options: None,
},
]
);
@ -961,38 +969,103 @@ fn parse_limit_by() {
#[test]
fn parse_settings_in_query() {
match clickhouse_and_generic()
.verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#)
{
Statement::Query(query) => {
assert_eq!(
query.settings,
Some(vec![
Setting {
key: Ident::new("max_threads"),
value: Number("1".parse().unwrap(), false)
},
Setting {
key: Ident::new("max_block_size"),
value: Number("10000".parse().unwrap(), false)
},
])
);
fn check_settings(sql: &str, expected: Vec<Setting>) {
match clickhouse_and_generic().verified_stmt(sql) {
Statement::Query(q) => {
assert_eq!(q.settings, Some(expected));
}
_ => unreachable!(),
}
}
for (sql, expected_settings) in [
(
r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#,
vec![
Setting {
key: Ident::new("max_threads"),
value: Expr::value(number("1")),
},
Setting {
key: Ident::new("max_block_size"),
value: Expr::value(number("10000")),
},
],
),
(
r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#,
vec![Setting {
key: Ident::new("additional_table_filters"),
value: Expr::Dictionary(vec![DictionaryField {
key: Ident::with_quote('\'', "table_1"),
value: Expr::value(single_quoted_string("x != 2")).into(),
}]),
}],
),
(
r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#,
vec![
Setting {
key: Ident::new("additional_result_filter"),
value: Expr::value(single_quoted_string("x != 2")),
},
Setting {
key: Ident::new("query_plan_optimize_lazy_materialization"),
value: Expr::value(Boolean(false)),
},
],
),
] {
check_settings(sql, expected_settings);
}
let invalid_cases = vec![
"SELECT * FROM t SETTINGS a",
("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"),
(
"SELECT * FROM t SETTINGS a=",
"SELECT * FROM t SETTINGS a=1, b",
"Expected: an expression, found: EOF",
),
("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"),
(
"SELECT * FROM t SETTINGS a=1, b=",
"SELECT * FROM t SETTINGS a=1, b=c",
"Expected: an expression, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {",
"Expected: identifier, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b'",
"Expected: :, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b': ",
"Expected: an expression, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c',}",
"Expected: identifier, found: }",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}",
"Expected: :, found: }",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }",
"Expected: an expression, found: }",
),
(
"SELECT * FROM t SETTINGS a = {ANY(b)}",
"Expected: :, found: (",
),
];
for sql in invalid_cases {
for (sql, error_msg) in invalid_cases {
assert_eq!(
clickhouse_and_generic()
.parse_sql_statements(sql)
.expect_err("Expected: SETTINGS key = value, found: ");
.unwrap_err(),
ParserError(error_msg.to_string())
);
}
}
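
With this change, ClickHouse SETTINGS values are full expressions (numbers, strings, booleans, dictionaries) rather than bare values. A minimal sketch of reading them back out of a parsed query, assuming the `Setting { key, value: Expr }` shape used by the test above:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM t SETTINGS max_threads = 1, additional_result_filter = 'x != 2'";
    let stmts = Parser::parse_sql(&ClickHouseDialect {}, sql).expect("SETTINGS should parse");
    if let Statement::Query(query) = &stmts[0] {
        // Each setting's value is an `Expr`, so it can be printed or inspected further.
        for setting in query.settings.as_deref().unwrap_or_default() {
            println!("{} = {}", setting.key, setting.value);
        }
    }
}
```
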
#[test]
@ -1341,7 +1414,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
clickhouse().verified_stmt(&format!("USE {}", object_name)),
clickhouse().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -1349,7 +1422,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -1363,7 +1436,7 @@ fn parse_use() {
fn test_query_with_format_clause() {
let format_options = vec!["TabSeparated", "JSONCompact", "NULL"];
for format in &format_options {
let sql = format!("SELECT * FROM t FORMAT {}", format);
let sql = format!("SELECT * FROM t FORMAT {format}");
match clickhouse_and_generic().verified_stmt(&sql) {
Statement::Query(query) => {
if *format == "NULL" {
@ -1546,11 +1619,11 @@ fn parse_select_table_function_settings() {
settings: Some(vec![
Setting {
key: "s0".into(),
value: Value::Number("3".parse().unwrap(), false),
value: Expr::value(number("3")),
},
Setting {
key: "s1".into(),
value: Value::SingleQuotedString("s".into()),
value: Expr::value(single_quoted_string("s")),
},
]),
},
@ -1571,11 +1644,11 @@ fn parse_select_table_function_settings() {
settings: Some(vec![
Setting {
key: "s0".into(),
value: Value::Number("3".parse().unwrap(), false),
value: Expr::value(number("3")),
},
Setting {
key: "s1".into(),
value: Value::SingleQuotedString("s".into()),
value: Expr::value(single_quoted_string("s")),
},
]),
},
@ -1585,7 +1658,6 @@ fn parse_select_table_function_settings() {
"SELECT * FROM t(SETTINGS a=)",
"SELECT * FROM t(SETTINGS a=1, b)",
"SELECT * FROM t(SETTINGS a=1, b=)",
"SELECT * FROM t(SETTINGS a=1, b=c)",
];
for sql in invalid_cases {
clickhouse_and_generic()

File diff suppressed because it is too large


@ -19,6 +19,7 @@ use sqlparser::ast::helpers::attached_token::AttachedToken;
use sqlparser::ast::*;
use sqlparser::dialect::{DatabricksDialect, GenericDialect};
use sqlparser::parser::ParserError;
use sqlparser::tokenizer::Span;
use test_utils::*;
#[macro_use]
@ -213,7 +214,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {}", object_name)),
databricks().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -221,7 +222,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -233,21 +234,21 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with keyword and different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)),
databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")),
Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote(
quote,
"my_catalog".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),
@ -328,7 +329,10 @@ fn data_type_timestamp_ntz() {
databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"),
Expr::TypedString {
data_type: DataType::TimestampNtz,
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned())
value: ValueWithSpan {
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()),
span: Span::empty(),
}
}
);
@ -357,6 +361,6 @@ fn data_type_timestamp_ntz() {
}]
);
}
s => panic!("Unexpected statement: {:?}", s),
s => panic!("Unexpected statement: {s:?}"),
}
}


@ -24,6 +24,7 @@ use test_utils::*;
use sqlparser::ast::*;
use sqlparser::dialect::{DuckDbDialect, GenericDialect};
use sqlparser::parser::ParserError;
fn duckdb() -> TestedDialects {
TestedDialects::new(vec![Box::new(DuckDbDialect {})])
@ -44,10 +45,12 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("i")),
field_type: DataType::Integer(None),
options: None,
},
],
StructBracketKind::Parentheses,
@ -84,6 +87,7 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("s")),
@ -92,14 +96,17 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("a1")),
field_type: DataType::Integer(None),
options: None,
},
StructField {
field_name: Some(Ident::new("a2")),
field_type: DataType::Varchar(None),
options: None,
},
],
StructBracketKind::Parentheses,
),
options: None,
},
],
StructBracketKind::Parentheses,
@ -362,7 +369,7 @@ fn test_duckdb_specific_int_types() {
("HUGEINT", DataType::HugeInt),
];
for (dtype_string, data_type) in duckdb_dtypes {
let sql = format!("SELECT 123::{}", dtype_string);
let sql = format!("SELECT 123::{dtype_string}");
let select = duckdb().verified_only_select(&sql);
assert_eq!(
&Expr::Cast {
@ -786,7 +793,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {}", object_name)),
duckdb().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -794,7 +801,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -806,7 +813,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
duckdb().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
@ -822,3 +831,32 @@ fn parse_use() {
])))
);
}
#[test]
fn test_duckdb_trim() {
let real_sql = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#;
assert_eq!(duckdb().verified_stmt(real_sql).to_string(), real_sql);
let sql_only_select = "SELECT TRIM('xyz', 'a')";
let select = duckdb().verified_only_select(sql_only_select);
assert_eq!(
&Expr::Trim {
expr: Box::new(Expr::Value(
Value::SingleQuotedString("xyz".to_owned()).with_empty_span()
)),
trim_where: None,
trim_what: None,
trim_characters: Some(vec![Expr::Value(
Value::SingleQuotedString("a".to_owned()).with_empty_span()
)]),
},
expr_from_projection(only(&select.projection))
);
// missing comma separation
let error_sql = "SELECT TRIM('xyz' 'a')";
assert_eq!(
ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
duckdb().parse_sql_statements(error_sql).unwrap_err()
);
}
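
DuckDB's TRIM takes the characters to strip as an extra argument, which the parser records in `trim_characters`. A minimal sketch of inspecting that field on a parsed projection, assuming the AST shape asserted in the test above:

```rust
use sqlparser::ast::{Expr, SelectItem, SetExpr, Statement};
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT TRIM('xyz', 'a')";
    let stmts = Parser::parse_sql(&DuckDbDialect {}, sql).expect("TRIM with characters should parse");
    let Statement::Query(query) = &stmts[0] else { unreachable!() };
    let SetExpr::Select(select) = query.body.as_ref() else { unreachable!() };
    if let SelectItem::UnnamedExpr(Expr::Trim { trim_characters, .. }) = &select.projection[0] {
        // Expect one character-set argument: the string literal 'a'.
        assert_eq!(trim_characters.as_ref().map(Vec::len), Some(1));
    }
}
```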


@ -524,7 +524,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}", object_name)),
hive().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -532,7 +532,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),


@ -153,7 +153,8 @@ fn parse_create_procedure() {
quote_style: None,
span: Span::empty(),
},
data_type: DataType::Int(None)
data_type: DataType::Int(None),
mode: None,
},
ProcedureParam {
name: Ident {
@ -164,14 +165,16 @@ fn parse_create_procedure() {
data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 256,
unit: None
}))
})),
mode: None,
}
]),
name: ObjectName::from(vec![Ident {
value: "test".into(),
quote_style: None,
span: Span::empty(),
}])
}]),
language: None,
}
)
}
@ -1670,7 +1673,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}", object_name)),
ms().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -1678,7 +1681,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -2184,7 +2187,7 @@ fn parse_mssql_if_else() {
"IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"
);
}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}
@ -2234,7 +2237,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 21), Location::new(1, 36))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Blocks
@ -2255,7 +2258,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 32), Location::new(1, 57))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}


@ -593,7 +593,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
mysql_and_generic().verified_stmt(&format!("USE {}", object_name)),
mysql_and_generic().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -601,8 +601,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
mysql_and_generic()
.verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -670,6 +669,20 @@ fn table_constraint_unique_primary_ctor(
characteristics: Option<ConstraintCharacteristics>,
unique_index_type_display: Option<KeyOrIndexDisplay>,
) -> TableConstraint {
let columns = columns
.into_iter()
.map(|ident| IndexColumn {
column: OrderByExpr {
expr: Expr::Identifier(ident),
options: OrderByOptions {
asc: None,
nulls_first: None,
},
with_fill: None,
},
operator_class: None,
})
.collect();
match unique_index_type_display {
Some(index_type_display) => TableConstraint::Unique {
name,
@ -795,6 +808,67 @@ fn parse_create_table_primary_and_unique_key_with_index_options() {
}
}
#[test]
fn parse_prefix_key_part() {
let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value(
number("10"),
)))];
for sql in [
"CREATE INDEX idx_index ON t(textcol(10))",
"ALTER TABLE tab ADD INDEX idx_index (textcol(10))",
"ALTER TABLE tab ADD PRIMARY KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))",
"CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))",
] {
match index_column(mysql_and_generic().verified_stmt(sql)) {
Expr::Function(Function {
name,
args: FunctionArguments::List(FunctionArgumentList { args, .. }),
..
}) => {
assert_eq!(name.to_string(), "textcol");
assert_eq!(args, expected);
}
expr => panic!("unexpected expression {expr} for {sql}"),
}
}
}
#[test]
fn test_functional_key_part() {
assert_eq!(
index_column(
mysql_and_generic()
.verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)")
),
Expr::Nested(Box::new(Expr::Collate {
expr: Box::new(Expr::Identifier("col".into())),
collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier(
Ident::new("utf8mb4_bin")
)]),
}))
);
assert_eq!(
index_column(mysql_and_generic().verified_stmt(
r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"#
)),
Expr::Nested(Box::new(Expr::Cast {
kind: CastKind::Cast,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("col"))),
op: BinaryOperator::LongArrow,
right: Box::new(Expr::Value(
Value::SingleQuotedString("$.id".to_string()).with_empty_span()
)),
}),
data_type: DataType::Unsigned,
format: None,
})),
);
}
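
Prefix key parts such as `textcol(10)` and parenthesized functional key parts now parse as index columns. A minimal round-trip sketch through the MySQL dialect, assuming a build that includes this change:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Both forms appear verbatim in the tests above, so they should round-trip.
    for sql in [
        "CREATE INDEX idx_index ON t(textcol(10))",
        "CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)",
    ] {
        let stmts = Parser::parse_sql(&MySqlDialect {}, sql).expect("index key part should parse");
        assert_eq!(stmts[0].to_string(), sql);
    }
}
```
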
#[test]
fn parse_create_table_primary_and_unique_key_with_index_type() {
let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {
@ -2188,11 +2262,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 2: Qualified column name that starts with digits and on its own represents a number.
@ -2202,11 +2276,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 3: Unqualified, the same token is parsed as a number.
@ -2220,11 +2294,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
assert_eq!(&number("15e29"), value);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 4: Quoted simple identifier.
@ -2234,11 +2308,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
assert_eq!(&Ident::with_quote('`', "15e29"), name);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 5: Quoted compound identifier.
@ -2251,11 +2325,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 6: Multi-level compound identifiers.
@ -2272,11 +2346,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 7: Multi-level compound quoted identifiers.
@ -2293,11 +2367,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}
@ -2802,7 +2876,7 @@ fn parse_alter_table_with_algorithm() {
vec![
AlterTableOperation::DropColumn {
has_column_keyword: true,
column_name: Ident::new("password_digest"),
column_names: vec![Ident::new("password_digest")],
if_exists: false,
drop_behavior: None,
},
@ -2850,7 +2924,7 @@ fn parse_alter_table_with_lock() {
vec![
AlterTableOperation::DropColumn {
has_column_keyword: true,
column_name: Ident::new("password_digest"),
column_names: vec![Ident::new("password_digest")],
if_exists: false,
drop_behavior: None,
},
@ -3542,6 +3616,7 @@ fn parse_grant() {
with_grant_option,
as_grantor: _,
granted_by,
current_grants: _,
} = stmt
{
assert_eq!(
@ -4035,3 +4110,28 @@ fn parse_alter_table_drop_index() {
AlterTableOperation::DropIndex { name } if name.value == "idx_index"
);
}
#[test]
fn parse_json_member_of() {
mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#);
let sql = r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#;
let stmt = mysql().verified_stmt(sql);
match stmt {
Statement::Query(query) => {
let select = query.body.as_select().unwrap();
assert_eq!(
select.projection,
vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf {
value: Box::new(Expr::Value(
Value::SingleQuotedString("ab".to_string()).into()
)),
array: Box::new(Expr::Value(
Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string())
.into()
)),
}))]
);
}
_ => panic!("Unexpected statement {stmt}"),
}
}
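
A minimal sketch of parsing the MEMBER OF predicate outside the test harness, assuming the `Expr::MemberOf` support from this diff; the statement round-trips through the MySQL dialect:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#;
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).expect("MEMBER OF should parse");
    // Serializing the statement preserves the MEMBER OF predicate.
    assert_eq!(stmts[0].to_string(), sql);
}
```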


@ -606,9 +606,10 @@ fn parse_alter_table_constraints_unique_nulls_distinct() {
.verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)")
{
Statement::AlterTable { operations, .. } => match &operations[0] {
AlterTableOperation::AddConstraint(TableConstraint::Unique {
nulls_distinct, ..
}) => {
AlterTableOperation::AddConstraint {
constraint: TableConstraint::Unique { nulls_distinct, .. },
..
} => {
assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct)
}
_ => unreachable!(),
@ -764,10 +765,7 @@ fn parse_drop_extension() {
#[test]
fn parse_alter_table_alter_column() {
pg().one_statement_parses_to(
"ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'",
"ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'",
);
pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'");
match alter_table_op(
pg().verified_stmt(
@ -783,6 +781,7 @@ fn parse_alter_table_alter_column() {
AlterColumnOperation::SetDataType {
data_type: DataType::Text,
using: Some(using_expr),
had_set: true,
}
);
}
@ -2535,12 +2534,12 @@ fn parse_create_indices_with_operator_classes() {
for expected_operator_class in &operator_classes {
let single_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);
let multi_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);
@ -3273,7 +3272,7 @@ fn test_fn_arg_with_value_operator() {
assert!(matches!(
&args[..],
&[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }]
), "Invalid function argument: {:?}", args);
), "Invalid function argument: {args:?}");
}
other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"),
}
@ -5258,7 +5257,10 @@ fn parse_at_time_zone() {
left: Box::new(Expr::AtTimeZone {
timestamp: Box::new(Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: ValueWithSpan {
value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
span: Span::empty(),
},
}),
time_zone: Box::new(Expr::Cast {
kind: CastKind::DoubleColon,
@ -5679,7 +5681,7 @@ fn parse_drop_trigger() {
"DROP TRIGGER{} check_update ON table_name{}",
if if_exists { " IF EXISTS" } else { "" },
option
.map(|o| format!(" {}", o))
.map(|o| format!(" {o}"))
.unwrap_or_else(|| "".to_string())
);
assert_eq!(
@ -5773,8 +5775,7 @@ fn parse_trigger_related_functions() {
// Now we parse the statements and check if they are parsed correctly.
let mut statements = pg()
.parse_sql_statements(&format!(
"{}{}{}{}",
sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger
"{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}"
))
.unwrap();
@ -6201,3 +6202,153 @@ fn parse_alter_table_replica_identity() {
_ => unreachable!(),
}
}
#[test]
fn parse_ts_datatypes() {
match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsVector,
options: vec![],
}]
);
}
_ => unreachable!(),
}
match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsQuery,
options: vec![],
}]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_alter_table_constraint_not_valid() {
match pg_and_generic().verified_stmt(
"ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID",
) {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::AddConstraint {
constraint: TableConstraint::ForeignKey {
name: Some("bar".into()),
index_name: None,
columns: vec!["baz".into()],
foreign_table: ObjectName::from(vec!["other".into()]),
referred_columns: vec!["ref".into()],
on_delete: None,
on_update: None,
characteristics: None,
},
not_valid: true,
}]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_alter_table_validate_constraint() {
match pg_and_generic().verified_stmt("ALTER TABLE foo VALIDATE CONSTRAINT bar") {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_create_server() {
let test_cases = vec![
(
"CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw",
CreateServerStatement {
name: ObjectName::from(vec!["myserver".into()]),
if_not_exists: false,
server_type: None,
version: None,
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: None,
},
),
(
"CREATE SERVER IF NOT EXISTS myserver TYPE 'server_type' VERSION 'server_version' FOREIGN DATA WRAPPER postgres_fdw",
CreateServerStatement {
name: ObjectName::from(vec!["myserver".into()]),
if_not_exists: true,
server_type: Some(Ident {
value: "server_type".to_string(),
quote_style: Some('\''),
span: Span::empty(),
}),
version: Some(Ident {
value: "server_version".to_string(),
quote_style: Some('\''),
span: Span::empty(),
}),
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: None,
}
),
(
"CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')",
CreateServerStatement {
name: ObjectName::from(vec!["myserver2".into()]),
if_not_exists: false,
server_type: None,
version: None,
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: Some(vec![
CreateServerOption {
key: "host".into(),
value: Ident {
value: "foo".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
CreateServerOption {
key: "dbname".into(),
value: Ident {
value: "foodb".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
CreateServerOption {
key: "port".into(),
value: Ident {
value: "5432".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
]),
}
)
];
for (sql, expected) in test_cases {
let Statement::CreateServer(stmt) = pg_and_generic().verified_stmt(sql) else {
unreachable!()
};
assert_eq!(stmt, expected);
}
}
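
A minimal sketch of parsing the new CREATE SERVER statement and listing its OPTIONS, assuming the `Statement::CreateServer(CreateServerStatement { .. })` shape asserted above:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')";
    let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("CREATE SERVER should parse");
    let Statement::CreateServer(stmt) = &stmts[0] else { unreachable!() };
    assert_eq!(stmt.foreign_data_wrapper.to_string(), "postgres_fdw");
    // Each option is a key/value pair of identifiers.
    for option in stmt.options.as_deref().unwrap_or_default() {
        println!("{} = {}", option.key, option.value);
    }
}
```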


@ -402,3 +402,8 @@ fn parse_extract_single_quotes() {
fn parse_string_literal_backslash_escape() {
redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'");
}
#[test]
fn parse_utf8_multibyte_idents() {
redshift().verified_stmt("SELECT 🚀.city AS 🎸 FROM customers AS 🚀");
}


@ -270,8 +270,8 @@ fn test_snowflake_create_table_with_tag() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(vec![
Tag::new("A".into(), "TAG A".to_string()),
Tag::new("B".into(), "TAG B".to_string())
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
]),
with_tags
);
@ -291,8 +291,8 @@ fn test_snowflake_create_table_with_tag() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(vec![
Tag::new("A".into(), "TAG A".to_string()),
Tag::new("B".into(), "TAG B".to_string())
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
]),
with_tags
);
@ -731,7 +731,7 @@ fn test_snowflake_create_table_with_columns_masking_policy() {
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
ColumnPolicyProperty {
with,
policy_name: "p".into(),
policy_name: ObjectName::from(vec![Ident::new("p")]),
using_columns,
}
))
@ -765,7 +765,7 @@ fn test_snowflake_create_table_with_columns_projection_policy() {
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
ColumnPolicyProperty {
with,
policy_name: "p".into(),
policy_name: ObjectName::from(vec![Ident::new("p")]),
using_columns: None,
}
))
@ -802,8 +802,14 @@ fn test_snowflake_create_table_with_columns_tags() {
option: ColumnOption::Tags(TagsColumnOption {
with,
tags: vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(
ObjectName::from(vec![Ident::new("A")]),
"TAG A".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("B")]),
"TAG B".into()
),
]
}),
}],
@ -846,7 +852,7 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
ColumnPolicyProperty {
with: true,
policy_name: "p1".into(),
policy_name: ObjectName::from(vec![Ident::new("p1")]),
using_columns: Some(vec!["a".into(), "b".into()]),
}
)),
@ -856,8 +862,14 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Tags(TagsColumnOption {
with: true,
tags: vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(
ObjectName::from(vec![Ident::new("A")]),
"TAG A".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("B")]),
"TAG B".into()
),
]
}),
}
@ -878,7 +890,7 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
ColumnPolicyProperty {
with: false,
policy_name: "p2".into(),
policy_name: ObjectName::from(vec![Ident::new("p2")]),
using_columns: None,
}
)),
@ -888,8 +900,14 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Tags(TagsColumnOption {
with: false,
tags: vec![
Tag::new("C".into(), "TAG C".into()),
Tag::new("D".into(), "TAG D".into()),
Tag::new(
ObjectName::from(vec![Ident::new("C")]),
"TAG C".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("D")]),
"TAG D".into()
),
]
}),
}
@ -942,8 +960,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
with_aggregation_policy.map(|name| name.to_string())
);
assert_eq!(Some(vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".into()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".into()),
]), with_tags);
}
@ -2510,10 +2528,7 @@ fn test_snowflake_stage_object_names_into_location() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)
@ -2536,10 +2551,7 @@ fn test_snowflake_stage_object_names_into_table() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)
@ -3020,7 +3032,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}", object_name)),
snowflake().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -3028,7 +3040,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -3040,7 +3052,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
@ -3059,35 +3073,37 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single and double identifier with keyword and different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Schema(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)),
snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")),
Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote(
quote,
"my_role".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)),
snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")),
Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote(
quote,
"my_wh".to_string(),
@ -3124,7 +3140,7 @@ fn view_comment_option_should_be_after_column_list() {
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t",
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t",
] {
snowflake_and_generic()
snowflake()
.verified_stmt(sql);
}
}
@ -3133,7 +3149,7 @@ fn view_comment_option_should_be_after_column_list() {
fn parse_view_column_descriptions() {
let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1";
match snowflake_and_generic().verified_stmt(sql) {
match snowflake().verified_stmt(sql) {
Statement::CreateView { name, columns, .. } => {
assert_eq!(name.to_string(), "v");
assert_eq!(
@ -3142,7 +3158,9 @@ fn parse_view_column_descriptions() {
ViewColumnDef {
name: Ident::new("a"),
data_type: None,
options: Some(vec![ColumnOption::Comment("Comment".to_string())]),
options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
"Comment".to_string()
)])),
},
ViewColumnDef {
name: Ident::new("b"),
@ -3397,10 +3415,38 @@ fn parse_ls_and_rm() {
.unwrap();
}
#[test]
fn test_sql_keywords_as_select_item_ident() {
// Some keywords that should be parsed as an alias
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
for kw in unreserved_kws {
snowflake().verified_stmt(&format!("SELECT 1, {kw}"));
}
// Some keywords that should not be parsed as an alias
let reserved_kws = vec![
"FROM",
"GROUP",
"HAVING",
"INTERSECT",
"INTO",
"ORDER",
"SELECT",
"UNION",
"WHERE",
"WITH",
];
for kw in reserved_kws {
assert!(snowflake()
.parse_sql_statements(&format!("SELECT 1, {kw}"))
.is_err());
}
}
#[test]
fn test_sql_keywords_as_select_item_aliases() {
// Some keywords that should be parsed as an alias
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT"];
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
for kw in unreserved_kws {
snowflake()
.one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}"));
@ -3627,7 +3673,7 @@ fn test_alter_session_followed_by_statement() {
.unwrap();
match stmts[..] {
[Statement::AlterSession { .. }, Statement::Query { .. }] => {}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}
@ -4082,3 +4128,107 @@ fn parse_connect_by_root_operator() {
"sql parser error: Expected an expression, found: FROM"
);
}
#[test]
fn test_begin_exception_end() {
for sql in [
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END",
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END",
"BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END",
"BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END",
] {
snowflake().verified_stmt(sql);
}
let sql = r#"
DECLARE
EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.');
EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!');
EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...');
BEGIN
BEGIN
SELECT 1;
EXCEPTION
WHEN EXCEPTION_1 THEN
SELECT 1;
WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
SELECT 2;
SELECT 3;
WHEN OTHER THEN
SELECT 4;
RAISE;
END;
END
"#;
// Outer `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction { mut statements, .. } = snowflake()
.parse_sql_statements(sql)
.unwrap()
.pop()
.unwrap()
else {
unreachable!();
};
// Inner `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction {
statements,
exception,
has_end_keyword,
..
} = statements.pop().unwrap()
else {
unreachable!();
};
assert_eq!(1, statements.len());
assert!(has_end_keyword);
let exception = exception.unwrap();
assert_eq!(3, exception.len());
assert_eq!(1, exception[0].idents.len());
assert_eq!(1, exception[0].statements.len());
assert_eq!(2, exception[1].idents.len());
assert_eq!(2, exception[1].statements.len());
}
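
A minimal round-trip sketch for the new BEGIN ... EXCEPTION ... END support, assuming a build that includes this change:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // An exception handler attached to a BEGIN ... END block, as in the test above.
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("exception block should parse");
    assert_eq!(stmts[0].to_string(), sql);
}
```
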
#[test]
fn test_snowflake_fetch_clause_syntax() {
let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical);
snowflake().verified_only_select_with_canonical(
"SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS",
canonical,
);
}
#[test]
fn test_snowflake_create_view_with_multiple_column_options() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}
#[test]
fn test_snowflake_create_view_with_composite_tag() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH TAG (foo.bar.baz.pii='email')) AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}
#[test]
fn test_snowflake_create_view_with_composite_policy_name() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}
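
A minimal sketch that round-trips the composite masking-policy column option through the Snowflake dialect, assuming the `ObjectName` policy names introduced in this diff:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("composite policy name should parse");
    assert_eq!(stmts[0].to_string(), sql);
}
```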


@ -324,7 +324,7 @@ fn parse_create_table_on_conflict_col() {
Keyword::IGNORE,
Keyword::REPLACE,
] {
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword);
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})");
match sqlite_and_generic().verified_stmt(&sql) {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
@ -410,7 +410,7 @@ fn parse_window_function_with_filter() {
"count",
"user_defined_function",
] {
let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name);
let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t");
let select = sqlite().verified_only_select(&sql);
assert_eq!(select.to_string(), sql);
assert_eq!(
@ -444,7 +444,7 @@ fn parse_window_function_with_filter() {
fn parse_attach_database() {
let sql = "ATTACH DATABASE 'test.db' AS test";
let verified_stmt = sqlite().verified_stmt(sql);
assert_eq!(sql, format!("{}", verified_stmt));
assert_eq!(sql, format!("{verified_stmt}"));
match verified_stmt {
Statement::AttachDatabase {
schema_name,