mirror of
https://github.com/apache/datafusion-sqlparser-rs.git
synced 2025-08-03 22:08:16 +00:00
Compare commits: v0.57.0-rc...main (73 commits)
Commits (73; the author and date columns of the original table were lost in this mirror, leaving only the commit SHAs):

c1648e79fe
dd650b88f3
ec0026d136
6932f4ad65
f5f51eb6f1
3d2db8c69b
85fa881379
91273703d4
15d8bfea62
bde269b56f
97a5b61a73
5ec953bd78
865c191a53
145922affe
6a5ef48921
40b187f32a
7558d35c84
f49c30feb6
2ed2cbe291
492184643a
799c1f748d
a73577c29f
23f40cdc40
40bbcc9834
5f69df2693
92db20673b
4d9338638f
650681422a
ecd5d88638
c5e6ba5e7d
9b9ffe450c
750a7aa054
bc2c4e263d
ee31b64f9e
15f35e1476
fd4934ec74
8f1414efff
b1a6d11e12
93450cc250
1a33abda63
f2fba48a7a
cf9e50474e
ed8757f2f0
d2466af20a
b0bcc46e22
942d747d89
239e30a97c
9020385c02
be2d2f14e7
418b94227a
015caca611
a3398223d7
f32a41a004
9ffc546870
abd80f9ecb
3bc94234df
50c605a471
6c38cdcadb
5f2b5fe7be
95d16e3b2d
1bbc05cdff
b2ab0061c1
b9365b3853
44f3be38e5
5d63663bc6
7865de015f
1d0dc7cdd8
204d3b484d
185a490218
b1b379e570
be30697efb
6f423969b0
e406422bac
48 changed files with 6985 additions and 1110 deletions
.github/workflows/license.yml (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

name: license

# trigger for all PRs and changes to main
on:
  push:
    branches:
      - main
  pull_request:

jobs:

  rat:
    name: Release Audit Tool (RAT)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.8
      - name: Audit licenses
        run: ./dev/release/run-rat.sh .
Cargo.toml
@@ -18,7 +18,7 @@
 [package]
 name = "sqlparser"
 description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
-version = "0.57.0"
+version = "0.58.0"
 authors = ["Apache DataFusion <dev@datafusion.apache.org>"]
 homepage = "https://github.com/apache/datafusion-sqlparser-rs"
 documentation = "https://docs.rs/sqlparser/"
README.md
@@ -21,7 +21,7 @@
(Badge images were stripped by this mirror; only the badge link targets survive. Per the changelog, this hunk updates the Rust CI badge, PR #1943.)
 [](https://opensource.org/licenses/Apache-2.0)
 [](https://crates.io/crates/sqlparser)
 [](https://github.com/sqlparser-rs/sqlparser-rs/actions?query=workflow%3ARust+branch%3Amain)
 [](https://coveralls.io/github/sqlparser-rs/sqlparser-rs?branch=main)
 [](https://gitter.im/sqlparser-rs/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
changelog/0.58.0.md (new file, 106 lines)
@@ -0,0 +1,106 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
-->

# sqlparser-rs 0.58.0 Changelog

This release consists of 47 commits from 18 contributors. See credits at the end of this changelog for more information.

**Fixed bugs:**

- fix: parse snowflake fetch clause [#1894](https://github.com/apache/datafusion-sqlparser-rs/pull/1894) (Vedin)

**Documentation updates:**

- docs: Update rust badge [#1943](https://github.com/apache/datafusion-sqlparser-rs/pull/1943) (Olexandr88)

**Other:**

- Add license header check to CI [#1888](https://github.com/apache/datafusion-sqlparser-rs/pull/1888) (alamb)
- Add support of parsing struct field's options in BigQuery [#1890](https://github.com/apache/datafusion-sqlparser-rs/pull/1890) (git-hulk)
- Fix parsing error when having fields after nested struct in BigQuery [#1897](https://github.com/apache/datafusion-sqlparser-rs/pull/1897) (git-hulk)
- Extend exception handling [#1884](https://github.com/apache/datafusion-sqlparser-rs/pull/1884) (bombsimon)
- Postgres: Add support for text search types [#1889](https://github.com/apache/datafusion-sqlparser-rs/pull/1889) (MohamedAbdeen21)
- Fix `limit` in subqueries [#1899](https://github.com/apache/datafusion-sqlparser-rs/pull/1899) (Dimchikkk)
- Use `IndexColumn` in all index definitions [#1900](https://github.com/apache/datafusion-sqlparser-rs/pull/1900) (mvzink)
- Support procedure argmode [#1901](https://github.com/apache/datafusion-sqlparser-rs/pull/1901) (ZacJW)
- Fix `impl Ord for Ident` [#1893](https://github.com/apache/datafusion-sqlparser-rs/pull/1893) (eliaperantoni)
- Snowflake: support multiple column options in `CREATE VIEW` [#1891](https://github.com/apache/datafusion-sqlparser-rs/pull/1891) (eliaperantoni)
- Add support for `LANGUAGE` clause in `CREATE PROCEDURE` [#1903](https://github.com/apache/datafusion-sqlparser-rs/pull/1903) (ZacJW)
- Fix clippy lints on 1.88.0 [#1910](https://github.com/apache/datafusion-sqlparser-rs/pull/1910) (iffyio)
- Snowflake: Add support for future grants [#1906](https://github.com/apache/datafusion-sqlparser-rs/pull/1906) (yoavcloud)
- Support for Map values in ClickHouse settings [#1896](https://github.com/apache/datafusion-sqlparser-rs/pull/1896) (solontsev)
- Fix join precedence for non-snowflake queries [#1905](https://github.com/apache/datafusion-sqlparser-rs/pull/1905) (Dimchikkk)
- Support remaining pipe operators [#1879](https://github.com/apache/datafusion-sqlparser-rs/pull/1879) (simonvandel)
- Make `GenericDialect` support from-first syntax [#1911](https://github.com/apache/datafusion-sqlparser-rs/pull/1911) (simonvandel)
- Redshift utf8 idents [#1915](https://github.com/apache/datafusion-sqlparser-rs/pull/1915) (yoavcloud)
- DuckDB: Add support for multiple `TRIM` arguments [#1916](https://github.com/apache/datafusion-sqlparser-rs/pull/1916) (ryanschneider)
- Redshift alter column type no set [#1912](https://github.com/apache/datafusion-sqlparser-rs/pull/1912) (yoavcloud)
- Postgres: support `ADD CONSTRAINT NOT VALID` and `VALIDATE CONSTRAINT` [#1908](https://github.com/apache/datafusion-sqlparser-rs/pull/1908) (achristmascarl)
- Add support for MySQL MEMBER OF [#1917](https://github.com/apache/datafusion-sqlparser-rs/pull/1917) (yoavcloud)
- Add span for `Expr::TypedString` [#1919](https://github.com/apache/datafusion-sqlparser-rs/pull/1919) (feral-dot-io)
- Support for Postgres `CREATE SERVER` [#1914](https://github.com/apache/datafusion-sqlparser-rs/pull/1914) (solontsev)
- Change tag and policy names to `ObjectName` [#1892](https://github.com/apache/datafusion-sqlparser-rs/pull/1892) (eliaperantoni)
- Add support for NULL escape char in pattern match searches [#1913](https://github.com/apache/datafusion-sqlparser-rs/pull/1913) (yoavcloud)
- Add support for dropping multiple columns in Snowflake [#1918](https://github.com/apache/datafusion-sqlparser-rs/pull/1918) (yoavcloud)
- Align Snowflake dialect to new test of reserved keywords [#1924](https://github.com/apache/datafusion-sqlparser-rs/pull/1924) (yoavcloud)
- Make `GenericDialect` support trailing commas in projections [#1921](https://github.com/apache/datafusion-sqlparser-rs/pull/1921) (simonvandel)
- Add support for several Snowflake grant statements [#1922](https://github.com/apache/datafusion-sqlparser-rs/pull/1922) (yoavcloud)
- Clickhouse: support empty parenthesized options [#1925](https://github.com/apache/datafusion-sqlparser-rs/pull/1925) (solontsev)
- Add Snowflake `COPY/REVOKE CURRENT GRANTS` option [#1926](https://github.com/apache/datafusion-sqlparser-rs/pull/1926) (yoavcloud)
- Add support for Snowflake identifier function [#1929](https://github.com/apache/datafusion-sqlparser-rs/pull/1929) (yoavcloud)
- Add support for granting privileges to procedures and functions in Snowflake [#1930](https://github.com/apache/datafusion-sqlparser-rs/pull/1930) (yoavcloud)
- Add support for `+` char in Snowflake stage names [#1935](https://github.com/apache/datafusion-sqlparser-rs/pull/1935) (yoavcloud)
- Snowflake Reserved SQL Keywords as Implicit Table Alias [#1934](https://github.com/apache/datafusion-sqlparser-rs/pull/1934) (yoavcloud)
- Add support for Redshift `SELECT * EXCLUDE` [#1936](https://github.com/apache/datafusion-sqlparser-rs/pull/1936) (yoavcloud)
- Support optional semicolon between statements [#1937](https://github.com/apache/datafusion-sqlparser-rs/pull/1937) (yoavcloud)
- Snowflake: support trailing options in `CREATE TABLE` [#1931](https://github.com/apache/datafusion-sqlparser-rs/pull/1931) (yoavcloud)
- MSSQL: Add support for EXEC output and default keywords [#1940](https://github.com/apache/datafusion-sqlparser-rs/pull/1940) (yoavcloud)
- Add identifier unicode support in Mysql, Postgres and Redshift [#1933](https://github.com/apache/datafusion-sqlparser-rs/pull/1933) (etgarperets)
- Add identifier start unicode support for Postegres, MySql and Redshift [#1944](https://github.com/apache/datafusion-sqlparser-rs/pull/1944) (etgarperets)
- Fix for Postgres regex and like binary operators [#1928](https://github.com/apache/datafusion-sqlparser-rs/pull/1928) (solontsev)
- Snowflake: Improve accuracy of lookahead in implicit LIMIT alias [#1941](https://github.com/apache/datafusion-sqlparser-rs/pull/1941) (yoavcloud)
- Add support for `DROP USER` statement [#1951](https://github.com/apache/datafusion-sqlparser-rs/pull/1951) (yoavcloud)

## Credits

Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.

```
    19	Yoav Cohen
     4	Sergey Olontsev
     3	Elia Perantoni
     3	Simon Vandel Sillesen
     2	Dima
     2	ZacJW
     2	etgarperets
     2	hulk
     1	Andrew Lamb
     1	Denys Tsomenko
     1	Ifeanyi Ubah
     1	Michael Victor Zink
     1	Mohamed Abdeen
     1	Olexandr88
     1	Ryan Schneider
     1	Simon Sawert
     1	carl
     1	feral-dot-io
```

Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
dev/release/rat_exclude_files.txt
@@ -1,7 +1,8 @@
+# Files to exclude from the Apache Rat (license) check
-.gitignore
 .tool-versions
+target/*
+**.gitignore
+rat.txt
 dev/release/rat_exclude_files.txt
-fuzz/.gitignore
 sqlparser_bench/img/flamegraph.svg
+**Cargo.lock
+filtered_rat.txt
examples/cli.rs
@@ -63,7 +63,7 @@ $ cargo run --example cli - [--dialectname]
     };

     let contents = if filename == "-" {
-        println!("Parsing from stdin using {:?}", dialect);
+        println!("Parsing from stdin using {dialect:?}");
         let mut buf = Vec::new();
         stdin()
             .read_to_end(&mut buf)
@ -26,7 +26,7 @@ edition = "2018"
|
||||||
sqlparser = { path = "../" }
|
sqlparser = { path = "../" }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
criterion = "0.6"
|
criterion = "0.7"
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "sqlparser_bench"
|
name = "sqlparser_bench"
|
||||||
|
|
|
@ -45,25 +45,24 @@ fn basic_queries(c: &mut Criterion) {
|
||||||
|
|
||||||
let large_statement = {
|
let large_statement = {
|
||||||
let expressions = (0..1000)
|
let expressions = (0..1000)
|
||||||
.map(|n| format!("FN_{}(COL_{})", n, n))
|
.map(|n| format!("FN_{n}(COL_{n})"))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
let tables = (0..1000)
|
let tables = (0..1000)
|
||||||
.map(|n| format!("TABLE_{}", n))
|
.map(|n| format!("TABLE_{n}"))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" JOIN ");
|
.join(" JOIN ");
|
||||||
let where_condition = (0..1000)
|
let where_condition = (0..1000)
|
||||||
.map(|n| format!("COL_{} = {}", n, n))
|
.map(|n| format!("COL_{n} = {n}"))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(" OR ");
|
.join(" OR ");
|
||||||
let order_condition = (0..1000)
|
let order_condition = (0..1000)
|
||||||
.map(|n| format!("COL_{} DESC", n))
|
.map(|n| format!("COL_{n} DESC"))
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ");
|
.join(", ");
|
||||||
|
|
||||||
format!(
|
format!(
|
||||||
"SELECT {} FROM {} WHERE {} ORDER BY {}",
|
"SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
|
||||||
expressions, tables, where_condition, order_condition
|
|
||||||
)
|
)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
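A large share of the hunks below are the same mechanical migration to Rust's inline format arguments (see "Fix clippy lints on 1.88.0", #1910, in the changelog above). A minimal sketch of the equivalence, not taken from the diff:

```rust
// Inline format args capture `n` directly from the enclosing scope;
// the two calls produce identical output.
fn main() {
    let n = 7;
    assert_eq!(format!("FN_{}(COL_{})", n, n), format!("FN_{n}(COL_{n})"));
}
```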
src/ast/data_type.rs
@@ -346,7 +346,16 @@ pub enum DataType {
     /// [1]: https://docs.databricks.com/aws/en/sql/language-manual/data-types/timestamp-ntz-type
     TimestampNtz,
     /// Interval type.
-    Interval,
+    Interval {
+        /// [PostgreSQL] fields specification like `INTERVAL YEAR TO MONTH`.
+        ///
+        /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-datetime.html
+        fields: Option<IntervalFields>,
+        /// [PostgreSQL] subsecond precision like `INTERVAL HOUR TO SECOND(3)`
+        ///
+        /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-datetime.html
+        precision: Option<u64>,
+    },
     /// JSON type.
     JSON,
     /// Binary JSON type.
@@ -446,6 +455,14 @@ pub enum DataType {
     ///
     /// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html
     GeometricType(GeometricTypeKind),
+    /// PostgreSQL text search vectors, see [PostgreSQL].
+    ///
+    /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
+    TsVector,
+    /// PostgreSQL text search query, see [PostgreSQL].
+    ///
+    /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
+    TsQuery,
 }

 impl fmt::Display for DataType {
@@ -627,7 +644,16 @@ impl fmt::Display for DataType {
                     timezone,
                 )
             }
-            DataType::Interval => write!(f, "INTERVAL"),
+            DataType::Interval { fields, precision } => {
+                write!(f, "INTERVAL")?;
+                if let Some(fields) = fields {
+                    write!(f, " {fields}")?;
+                }
+                if let Some(precision) = precision {
+                    write!(f, "({precision})")?;
+                }
+                Ok(())
+            }
             DataType::JSON => write!(f, "JSON"),
             DataType::JSONB => write!(f, "JSONB"),
             DataType::Regclass => write!(f, "REGCLASS"),
@@ -658,7 +684,7 @@ impl fmt::Display for DataType {
             }
             DataType::Enum(vals, bits) => {
                 match bits {
-                    Some(bits) => write!(f, "ENUM{}", bits),
+                    Some(bits) => write!(f, "ENUM{bits}"),
                     None => write!(f, "ENUM"),
                 }?;
                 write!(f, "(")?;
@@ -706,16 +732,16 @@ impl fmt::Display for DataType {
             }
             // ClickHouse
             DataType::Nullable(data_type) => {
-                write!(f, "Nullable({})", data_type)
+                write!(f, "Nullable({data_type})")
             }
             DataType::FixedString(character_length) => {
-                write!(f, "FixedString({})", character_length)
+                write!(f, "FixedString({character_length})")
             }
             DataType::LowCardinality(data_type) => {
-                write!(f, "LowCardinality({})", data_type)
+                write!(f, "LowCardinality({data_type})")
             }
             DataType::Map(key_data_type, value_data_type) => {
-                write!(f, "Map({}, {})", key_data_type, value_data_type)
+                write!(f, "Map({key_data_type}, {value_data_type})")
             }
             DataType::Tuple(fields) => {
                 write!(f, "Tuple({})", display_comma_separated(fields))
@@ -737,7 +763,9 @@ impl fmt::Display for DataType {
             DataType::NamedTable { name, columns } => {
                 write!(f, "{} TABLE ({})", name, display_comma_separated(columns))
             }
-            DataType::GeometricType(kind) => write!(f, "{}", kind),
+            DataType::GeometricType(kind) => write!(f, "{kind}"),
+            DataType::TsVector => write!(f, "TSVECTOR"),
+            DataType::TsQuery => write!(f, "TSQUERY"),
         }
     }
 }
@@ -879,6 +907,48 @@ impl fmt::Display for TimezoneInfo {
     }
 }

+/// Fields for [Postgres] `INTERVAL` type.
+///
+/// [Postgres]: https://www.postgresql.org/docs/17/datatype-datetime.html
+#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum IntervalFields {
+    Year,
+    Month,
+    Day,
+    Hour,
+    Minute,
+    Second,
+    YearToMonth,
+    DayToHour,
+    DayToMinute,
+    DayToSecond,
+    HourToMinute,
+    HourToSecond,
+    MinuteToSecond,
+}
+
+impl fmt::Display for IntervalFields {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            IntervalFields::Year => write!(f, "YEAR"),
+            IntervalFields::Month => write!(f, "MONTH"),
+            IntervalFields::Day => write!(f, "DAY"),
+            IntervalFields::Hour => write!(f, "HOUR"),
+            IntervalFields::Minute => write!(f, "MINUTE"),
+            IntervalFields::Second => write!(f, "SECOND"),
+            IntervalFields::YearToMonth => write!(f, "YEAR TO MONTH"),
+            IntervalFields::DayToHour => write!(f, "DAY TO HOUR"),
+            IntervalFields::DayToMinute => write!(f, "DAY TO MINUTE"),
+            IntervalFields::DayToSecond => write!(f, "DAY TO SECOND"),
+            IntervalFields::HourToMinute => write!(f, "HOUR TO MINUTE"),
+            IntervalFields::HourToSecond => write!(f, "HOUR TO SECOND"),
+            IntervalFields::MinuteToSecond => write!(f, "MINUTE TO SECOND"),
+        }
+    }
+}
+
 /// Additional information for `NUMERIC`, `DECIMAL`, and `DEC` data types
 /// following the 2016 [SQL Standard].
 ///
@@ -932,7 +1002,7 @@ impl fmt::Display for CharacterLength {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             CharacterLength::IntegerLength { length, unit } => {
-                write!(f, "{}", length)?;
+                write!(f, "{length}")?;
                 if let Some(unit) = unit {
                     write!(f, " {unit}")?;
                 }
@@ -987,7 +1057,7 @@ impl fmt::Display for BinaryLength {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             BinaryLength::IntegerLength { length } => {
-                write!(f, "{}", length)?;
+                write!(f, "{length}")?;
             }
             BinaryLength::Max => {
                 write!(f, "MAX")?;
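A minimal sketch of how the reworked `Interval` variant round-trips through `Display`, assuming `DataType` and `IntervalFields` are re-exported from `sqlparser::ast` like this file's other type enums:

```rust
use sqlparser::ast::{DataType, IntervalFields};

fn main() {
    // Base keyword, then optional fields, then optional parenthesized precision,
    // exactly as in the Display arm shown in the hunk above.
    let dt = DataType::Interval {
        fields: Some(IntervalFields::HourToSecond),
        precision: Some(3),
    };
    assert_eq!(dt.to_string(), "INTERVAL HOUR TO SECOND(3)");
}
```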
src/ast/dcl.rs
@@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation {
                 in_database,
             } => {
                 if let Some(database_name) = in_database {
-                    write!(f, "IN DATABASE {} ", database_name)?;
+                    write!(f, "IN DATABASE {database_name} ")?;
                 }

                 match config_value {
@@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation {
                 in_database,
             } => {
                 if let Some(database_name) = in_database {
-                    write!(f, "IN DATABASE {} ", database_name)?;
+                    write!(f, "IN DATABASE {database_name} ")?;
                 }

                 match config_name {
@@ -218,15 +218,15 @@ impl fmt::Display for Use {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str("USE ")?;
         match self {
-            Use::Catalog(name) => write!(f, "CATALOG {}", name),
-            Use::Schema(name) => write!(f, "SCHEMA {}", name),
-            Use::Database(name) => write!(f, "DATABASE {}", name),
-            Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name),
-            Use::Role(name) => write!(f, "ROLE {}", name),
+            Use::Catalog(name) => write!(f, "CATALOG {name}"),
+            Use::Schema(name) => write!(f, "SCHEMA {name}"),
+            Use::Database(name) => write!(f, "DATABASE {name}"),
+            Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
+            Use::Role(name) => write!(f, "ROLE {name}"),
             Use::SecondaryRoles(secondary_roles) => {
-                write!(f, "SECONDARY ROLES {}", secondary_roles)
+                write!(f, "SECONDARY ROLES {secondary_roles}")
             }
-            Use::Object(name) => write!(f, "{}", name),
+            Use::Object(name) => write!(f, "{name}"),
             Use::Default => write!(f, "DEFAULT"),
         }
     }
 }
src/ast/ddl.rs (147 lines changed)
@@ -30,11 +30,11 @@ use sqlparser_derive::{Visit, VisitMut};

 use crate::ast::value::escape_single_quote_string;
 use crate::ast::{
-    display_comma_separated, display_separated, CommentDef, CreateFunctionBody,
+    display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody,
     CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull,
-    FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName,
-    OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value,
-    ValueWithSpan,
+    FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition,
+    ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag,
+    Value, ValueWithSpan,
 };
 use crate::keywords::Keyword;
 use crate::tokenizer::Token;
@@ -57,7 +57,7 @@ impl fmt::Display for ReplicaIdentity {
             ReplicaIdentity::None => f.write_str("NONE"),
             ReplicaIdentity::Full => f.write_str("FULL"),
             ReplicaIdentity::Default => f.write_str("DEFAULT"),
-            ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {}", idx),
+            ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"),
         }
     }
 }
@@ -67,8 +67,11 @@ impl fmt::Display for ReplicaIdentity {
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub enum AlterTableOperation {
-    /// `ADD <table_constraint>`
-    AddConstraint(TableConstraint),
+    /// `ADD <table_constraint> [NOT VALID]`
+    AddConstraint {
+        constraint: TableConstraint,
+        not_valid: bool,
+    },
     /// `ADD [COLUMN] [IF NOT EXISTS] <column_def>`
     AddColumn {
         /// `[COLUMN]`.
@@ -137,16 +140,16 @@ pub enum AlterTableOperation {
         name: Ident,
         drop_behavior: Option<DropBehavior>,
     },
-    /// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ CASCADE ]`
+    /// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ , <column_name>, ... ] [ CASCADE ]`
     DropColumn {
         has_column_keyword: bool,
-        column_name: Ident,
+        column_names: Vec<Ident>,
         if_exists: bool,
         drop_behavior: Option<DropBehavior>,
     },
     /// `ATTACH PART|PARTITION <partition_expr>`
     /// Note: this is a ClickHouse-specific operation, please refer to
-    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/pakrtition#attach-partitionpart)
+    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#attach-partitionpart)
     AttachPartition {
         // PART is not a short form of PARTITION, it's a separate keyword
         // which represents a physical file on disk and partition is a logical entity.
@@ -344,6 +347,20 @@ pub enum AlterTableOperation {
         equals: bool,
         value: ValueWithSpan,
     },
+    /// `VALIDATE CONSTRAINT <name>`
+    ValidateConstraint {
+        name: Ident,
+    },
+    /// Arbitrary parenthesized `SET` options.
+    ///
+    /// Example:
+    /// ```sql
+    /// SET (scale_factor = 0.01, threshold = 500)`
+    /// ```
+    /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-altertable.html)
+    SetOptionsParens {
+        options: Vec<SqlOption>,
+    },
 }

 /// An `ALTER Policy` (`Statement::AlterPolicy`) operation
@@ -450,7 +467,7 @@ pub enum Owner {
 impl fmt::Display for Owner {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Owner::Ident(ident) => write!(f, "{}", ident),
+            Owner::Ident(ident) => write!(f, "{ident}"),
             Owner::CurrentRole => write!(f, "CURRENT_ROLE"),
             Owner::CurrentUser => write!(f, "CURRENT_USER"),
             Owner::SessionUser => write!(f, "SESSION_USER"),
@@ -494,7 +511,16 @@ impl fmt::Display for AlterTableOperation {
                 display_separated(new_partitions, " "),
                 ine = if *if_not_exists { " IF NOT EXISTS" } else { "" }
             ),
-            AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"),
+            AlterTableOperation::AddConstraint {
+                not_valid,
+                constraint,
+            } => {
+                write!(f, "ADD {constraint}")?;
+                if *not_valid {
+                    write!(f, " NOT VALID")?;
+                }
+                Ok(())
+            }
             AlterTableOperation::AddColumn {
                 column_keyword,
                 if_not_exists,
@@ -525,7 +551,7 @@ impl fmt::Display for AlterTableOperation {
                 if *if_not_exists {
                     write!(f, " IF NOT EXISTS")?;
                 }
-                write!(f, " {} ({})", name, query)
+                write!(f, " {name} ({query})")
             }
             AlterTableOperation::Algorithm { equals, algorithm } => {
                 write!(
@@ -540,7 +566,7 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)
+                write!(f, " {name}")
             }
             AlterTableOperation::MaterializeProjection {
                 if_exists,
@@ -551,9 +577,9 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)?;
+                write!(f, " {name}")?;
                 if let Some(partition) = partition {
-                    write!(f, " IN PARTITION {}", partition)?;
+                    write!(f, " IN PARTITION {partition}")?;
                 }
                 Ok(())
             }
@@ -566,9 +592,9 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)?;
+                write!(f, " {name}")?;
                 if let Some(partition) = partition {
-                    write!(f, " IN PARTITION {}", partition)?;
+                    write!(f, " IN PARTITION {partition}")?;
                 }
                 Ok(())
             }
@@ -615,7 +641,7 @@ impl fmt::Display for AlterTableOperation {
             AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"),
             AlterTableOperation::DropColumn {
                 has_column_keyword,
-                column_name,
+                column_names: column_name,
                 if_exists,
                 drop_behavior,
             } => write!(
@@ -623,7 +649,7 @@ impl fmt::Display for AlterTableOperation {
                 "DROP {}{}{}{}",
                 if *has_column_keyword { "COLUMN " } else { "" },
                 if *if_exists { "IF EXISTS " } else { "" },
-                column_name,
+                display_comma_separated(column_name),
                 match drop_behavior {
                     None => "",
                     Some(DropBehavior::Restrict) => " RESTRICT",
@@ -772,6 +798,12 @@ impl fmt::Display for AlterTableOperation {
             AlterTableOperation::ReplicaIdentity { identity } => {
                 write!(f, "REPLICA IDENTITY {identity}")
             }
+            AlterTableOperation::ValidateConstraint { name } => {
+                write!(f, "VALIDATE CONSTRAINT {name}")
+            }
+            AlterTableOperation::SetOptionsParens { options } => {
+                write!(f, "SET ({})", display_comma_separated(options))
+            }
         }
     }
 }
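Together with the enum changes earlier in this file, the Postgres syntax from #1908 should now round-trip; a sketch (dialect choice and exact rendering assumed, not taken from the diff):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        "ALTER TABLE t ADD CONSTRAINT c CHECK (x > 0) NOT VALID",
        "ALTER TABLE t VALIDATE CONSTRAINT c",
    ] {
        // Parse, then re-serialize; the Display arms above emit the
        // NOT VALID suffix and the VALIDATE CONSTRAINT operation.
        let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
        assert_eq!(stmts[0].to_string(), sql);
    }
}
```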
@@ -893,7 +925,10 @@ pub enum AlterColumnOperation {
         data_type: DataType,
         /// PostgreSQL specific
         using: Option<Expr>,
+        /// Set to true if the statement includes the `SET DATA TYPE` keywords
+        had_set: bool,
     },

     /// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]`
     ///
     /// Note: this is a PostgreSQL-specific operation.
@@ -914,12 +949,19 @@ impl fmt::Display for AlterColumnOperation {
             AlterColumnOperation::DropDefault => {
                 write!(f, "DROP DEFAULT")
             }
-            AlterColumnOperation::SetDataType { data_type, using } => {
-                if let Some(expr) = using {
-                    write!(f, "SET DATA TYPE {data_type} USING {expr}")
-                } else {
-                    write!(f, "SET DATA TYPE {data_type}")
-                }
+            AlterColumnOperation::SetDataType {
+                data_type,
+                using,
+                had_set,
+            } => {
+                if *had_set {
+                    write!(f, "SET DATA ")?;
+                }
+                write!(f, "TYPE {data_type}")?;
+                if let Some(expr) = using {
+                    write!(f, " USING {expr}")?;
+                }
+                Ok(())
             }
             AlterColumnOperation::AddGenerated {
                 generated_as,
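The new `had_set` flag only records which spelling the statement used, so Redshift's bare `ALTER COLUMN ... TYPE` (#1912) is preserved on output. A sketch of the two renderings, with the variant shape as in the hunk above:

```rust
use sqlparser::ast::{AlterColumnOperation, DataType};

fn main() {
    let mut op = AlterColumnOperation::SetDataType {
        data_type: DataType::BigInt(None),
        using: None,
        had_set: true, // statement was written with the SET DATA TYPE keywords
    };
    assert_eq!(op.to_string(), "SET DATA TYPE BIGINT");

    if let AlterColumnOperation::SetDataType { had_set, .. } = &mut op {
        *had_set = false; // bare `TYPE` spelling
    }
    assert_eq!(op.to_string(), "TYPE BIGINT");
}
```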
@@ -979,7 +1021,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Identifiers of the columns that are unique.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
         index_options: Vec<IndexOption>,
         characteristics: Option<ConstraintCharacteristics>,
         /// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]`
@@ -1015,7 +1057,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Identifiers of the columns that form the primary key.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
         index_options: Vec<IndexOption>,
         characteristics: Option<ConstraintCharacteristics>,
     },
@@ -1060,7 +1102,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Referred column identifier list.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
     },
     /// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same,
     /// and MySQL displays both the same way, it is part of this definition as well.
@@ -1083,7 +1125,7 @@ pub enum TableConstraint {
         /// Optional index name.
         opt_index_name: Option<Ident>,
         /// Referred column identifier list.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
     },
 }

@@ -1168,7 +1210,7 @@ impl fmt::Display for TableConstraint {
                 write!(f, " ON UPDATE {action}")?;
             }
             if let Some(characteristics) = characteristics {
-                write!(f, " {}", characteristics)?;
+                write!(f, " {characteristics}")?;
             }
             Ok(())
         }
@@ -1308,7 +1350,7 @@ impl fmt::Display for IndexType {
             Self::SPGiST => write!(f, "SPGIST"),
             Self::BRIN => write!(f, "BRIN"),
             Self::Bloom => write!(f, "BLOOM"),
-            Self::Custom(name) => write!(f, "{}", name),
+            Self::Custom(name) => write!(f, "{name}"),
         }
     }
 }
@@ -1367,13 +1409,18 @@ impl fmt::Display for NullsDistinctOption {
 pub struct ProcedureParam {
     pub name: Ident,
     pub data_type: DataType,
+    pub mode: Option<ArgMode>,
 }

 impl fmt::Display for ProcedureParam {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if let Some(mode) = &self.mode {
+            write!(f, "{mode} {} {}", self.name, self.data_type)
+        } else {
             write!(f, "{} {}", self.name, self.data_type)
+        }
     }
 }

 /// SQL column definition
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
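With `mode` populated, a procedure parameter keeps its argmode on output (#1901). An illustrative sketch, assuming `ProcedureParam` and `ArgMode` are exported from `sqlparser::ast`:

```rust
use sqlparser::ast::{ArgMode, DataType, Ident, ProcedureParam};

fn main() {
    let param = ProcedureParam {
        name: Ident::new("x"),
        data_type: DataType::Int(None),
        mode: Some(ArgMode::In),
    };
    // The Display impl above prefixes the mode when present.
    assert_eq!(param.to_string(), "IN x INT");
}
```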
@@ -1421,17 +1468,41 @@ impl fmt::Display for ColumnDef {
 pub struct ViewColumnDef {
     pub name: Ident,
     pub data_type: Option<DataType>,
-    pub options: Option<Vec<ColumnOption>>,
+    pub options: Option<ColumnOptions>,
+}
+
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum ColumnOptions {
+    CommaSeparated(Vec<ColumnOption>),
+    SpaceSeparated(Vec<ColumnOption>),
+}
+
+impl ColumnOptions {
+    pub fn as_slice(&self) -> &[ColumnOption] {
+        match self {
+            ColumnOptions::CommaSeparated(options) => options.as_slice(),
+            ColumnOptions::SpaceSeparated(options) => options.as_slice(),
+        }
+    }
 }

 impl fmt::Display for ViewColumnDef {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.name)?;
         if let Some(data_type) = self.data_type.as_ref() {
-            write!(f, " {}", data_type)?;
+            write!(f, " {data_type}")?;
         }
         if let Some(options) = self.options.as_ref() {
-            write!(f, " {}", display_comma_separated(options.as_slice()))?;
+            match options {
+                ColumnOptions::CommaSeparated(column_options) => {
+                    write!(f, " {}", display_comma_separated(column_options.as_slice()))?;
+                }
+                ColumnOptions::SpaceSeparated(column_options) => {
+                    write!(f, " {}", display_separated(column_options.as_slice(), " "))?
+                }
+            }
         }
         Ok(())
     }
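A short sketch of the new `ColumnOptions` wrapper from #1891: the option list is unchanged, only the separator used when redisplaying a view column differs between the two variants (assumes `ColumnOptions` is re-exported from `sqlparser::ast` next to `ColumnOption`):

```rust
use sqlparser::ast::{ColumnOption, ColumnOptions};

fn main() {
    let opts = vec![ColumnOption::NotNull];
    let comma = ColumnOptions::CommaSeparated(opts.clone());
    let space = ColumnOptions::SpaceSeparated(opts);
    // Both variants expose the same slice; only ViewColumnDef's Display
    // chooses ", " versus " " as the separator, per the hunk above.
    assert_eq!(comma.as_slice(), space.as_slice());
}
```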
@@ -1651,7 +1722,7 @@ pub struct ColumnPolicyProperty {
     /// ```
     /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table
     pub with: bool,
-    pub policy_name: Ident,
+    pub policy_name: ObjectName,
     pub using_columns: Option<Vec<Ident>>,
 }

@@ -1816,7 +1887,7 @@ impl fmt::Display for ColumnOption {
             } => {
                 write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?;
                 if let Some(characteristics) = characteristics {
-                    write!(f, " {}", characteristics)?;
+                    write!(f, " {characteristics}")?;
                 }
                 Ok(())
             }
@@ -1838,7 +1909,7 @@ impl fmt::Display for ColumnOption {
                 write!(f, " ON UPDATE {action}")?;
             }
             if let Some(characteristics) = characteristics {
-                write!(f, " {}", characteristics)?;
+                write!(f, " {characteristics}")?;
             }
             Ok(())
         }
@@ -1898,7 +1969,7 @@ impl fmt::Display for ColumnOption {
                 write!(f, "{parameters}")
             }
             OnConflict(keyword) => {
-                write!(f, "ON CONFLICT {:?}", keyword)?;
+                write!(f, "ON CONFLICT {keyword:?}")?;
                 Ok(())
             }
             Policy(parameters) => {
src/ast/dml.rs
@@ -55,7 +55,7 @@ impl Display for IndexColumn {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.column)?;
         if let Some(operator_class) = &self.operator_class {
-            write!(f, " {}", operator_class)?;
+            write!(f, " {operator_class}")?;
         }
         Ok(())
     }
@@ -175,7 +175,9 @@ pub struct CreateTable {
     pub partition_by: Option<Box<Expr>>,
     /// BigQuery: Table clustering column list.
     /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list>
-    pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
+    /// Snowflake: Table clustering list which contains base column, expressions on base columns.
+    /// <https://docs.snowflake.com/en/user-guide/tables-clustering-keys#defining-a-clustering-key-for-a-table>
+    pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
     /// Hive: Table clustering column list.
     /// <https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable>
     pub clustered_by: Option<ClusteredBy>,
@@ -264,7 +266,7 @@ impl Display for CreateTable {
             name = self.name,
         )?;
         if let Some(on_cluster) = &self.on_cluster {
-            write!(f, " ON CLUSTER {}", on_cluster)?;
+            write!(f, " ON CLUSTER {on_cluster}")?;
         }
         if !self.columns.is_empty() || !self.constraints.is_empty() {
             f.write_str(" (")?;
@@ -381,15 +383,15 @@ impl Display for CreateTable {
         match &self.table_options {
             options @ CreateTableOptions::With(_)
             | options @ CreateTableOptions::Plain(_)
-            | options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?,
+            | options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
             _ => (),
         }

         if let Some(primary_key) = &self.primary_key {
-            write!(f, " PRIMARY KEY {}", primary_key)?;
+            write!(f, " PRIMARY KEY {primary_key}")?;
         }
         if let Some(order_by) = &self.order_by {
-            write!(f, " ORDER BY {}", order_by)?;
+            write!(f, " ORDER BY {order_by}")?;
         }
         if let Some(inherits) = &self.inherits {
             write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
@@ -401,7 +403,7 @@ impl Display for CreateTable {
             write!(f, " CLUSTER BY {cluster_by}")?;
         }
         if let options @ CreateTableOptions::Options(_) = &self.table_options {
-            write!(f, " {}", options)?;
+            write!(f, " {options}")?;
         }
         if let Some(external_volume) = self.external_volume.as_ref() {
             write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;
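Because `cluster_by` now holds expressions rather than bare identifiers, Snowflake clustering keys defined on expressions become representable. A hedged round-trip sketch (exact serialization not verified against the diff):

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // A clustering key on an expression over a base column.
    let sql = "CREATE TABLE t (ts TIMESTAMP) CLUSTER BY (DATE_TRUNC('DAY', ts))";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    assert_eq!(stmts[0].to_string(), sql);
}
```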
src/ast/helpers/key_value_options.rs
@@ -31,11 +31,22 @@ use serde::{Deserialize, Serialize};
 #[cfg(feature = "visitor")]
 use sqlparser_derive::{Visit, VisitMut};

+use crate::ast::display_separated;
+
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub struct KeyValueOptions {
     pub options: Vec<KeyValueOption>,
+    pub delimiter: KeyValueOptionsDelimiter,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum KeyValueOptionsDelimiter {
+    Space,
+    Comma,
 }

 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -59,18 +70,11 @@ pub struct KeyValueOption {

 impl fmt::Display for KeyValueOptions {
     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        if !self.options.is_empty() {
-            let mut first = false;
-            for option in &self.options {
-                if !first {
-                    first = true;
-                } else {
-                    f.write_str(" ")?;
-                }
-                write!(f, "{}", option)?;
-            }
-        }
-        Ok(())
+        let sep = match self.delimiter {
+            KeyValueOptionsDelimiter::Space => " ",
+            KeyValueOptionsDelimiter::Comma => ", ",
+        };
+        write!(f, "{}", display_separated(&self.options, sep))
     }
 }
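The rewrite swaps the hand-rolled separator loop for `display_separated` keyed off the new delimiter field. A standalone sketch of the same pattern with simplified stand-in types (not the crate's API):

```rust
use std::fmt;

enum Delimiter {
    Space,
    Comma,
}

struct Options {
    options: Vec<String>, // stands in for Vec<KeyValueOption>
    delimiter: Delimiter,
}

impl fmt::Display for Options {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let sep = match self.delimiter {
            Delimiter::Space => " ",
            Delimiter::Comma => ", ",
        };
        // Equivalent in spirit to display_separated(&self.options, sep).
        write!(f, "{}", self.options.join(sep))
    }
}

fn main() {
    let o = Options {
        options: vec!["A=1".into(), "B=2".into()],
        delimiter: Delimiter::Comma,
    };
    assert_eq!(o.to_string(), "A=1, B=2");
}
```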
src/ast/helpers/mod.rs
@@ -16,5 +16,6 @@
 // under the License.
 pub mod attached_token;
 pub mod key_value_options;
+pub mod stmt_create_database;
 pub mod stmt_create_table;
 pub mod stmt_data_loading;
src/ast/helpers/stmt_create_database.rs (new file, 324 lines; listing truncated in this mirror)
@@ -0,0 +1,324 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#[cfg(not(feature = "std"))]
use alloc::{boxed::Box, format, string::String, vec, vec::Vec};

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

#[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut};

use crate::ast::{
    CatalogSyncNamespaceMode, ContactEntry, ObjectName, Statement, StorageSerializationPolicy, Tag,
};
use crate::parser::ParserError;

/// Builder for create database statement variant ([1]).
///
/// This structure helps building and accessing a create database with more ease, without needing to:
/// - Match the enum itself a lot of times; or
/// - Moving a lot of variables around the code.
///
/// # Example
/// ```rust
/// use sqlparser::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
/// use sqlparser::ast::{ColumnDef, Ident, ObjectName};
/// let builder = CreateDatabaseBuilder::new(ObjectName::from(vec![Ident::new("database_name")]))
///     .if_not_exists(true);
/// // You can access internal elements with ease
/// assert!(builder.if_not_exists);
/// // Convert to a statement
/// assert_eq!(
///     builder.build().to_string(),
///     "CREATE DATABASE IF NOT EXISTS database_name"
/// )
/// ```
///
/// [1]: Statement::CreateDatabase
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct CreateDatabaseBuilder {
    pub db_name: ObjectName,
    pub if_not_exists: bool,
    pub location: Option<String>,
    pub managed_location: Option<String>,
    pub or_replace: bool,
    pub transient: bool,
    pub clone: Option<ObjectName>,
    pub data_retention_time_in_days: Option<u64>,
    pub max_data_extension_time_in_days: Option<u64>,
    pub external_volume: Option<String>,
    pub catalog: Option<String>,
    pub replace_invalid_characters: Option<bool>,
    pub default_ddl_collation: Option<String>,
    pub storage_serialization_policy: Option<StorageSerializationPolicy>,
    pub comment: Option<String>,
    pub catalog_sync: Option<String>,
    pub catalog_sync_namespace_mode: Option<CatalogSyncNamespaceMode>,
    pub catalog_sync_namespace_flatten_delimiter: Option<String>,
    pub with_tags: Option<Vec<Tag>>,
    pub with_contacts: Option<Vec<ContactEntry>>,
}

impl CreateDatabaseBuilder {
    pub fn new(name: ObjectName) -> Self {
        Self {
            db_name: name,
            if_not_exists: false,
            location: None,
            managed_location: None,
            or_replace: false,
            transient: false,
            clone: None,
            data_retention_time_in_days: None,
            max_data_extension_time_in_days: None,
            external_volume: None,
            catalog: None,
            replace_invalid_characters: None,
            default_ddl_collation: None,
            storage_serialization_policy: None,
            comment: None,
            catalog_sync: None,
            catalog_sync_namespace_mode: None,
            catalog_sync_namespace_flatten_delimiter: None,
            with_tags: None,
            with_contacts: None,
        }
    }

    pub fn location(mut self, location: Option<String>) -> Self {
        self.location = location;
        self
    }

    pub fn managed_location(mut self, managed_location: Option<String>) -> Self {
        self.managed_location = managed_location;
        self
    }

    pub fn or_replace(mut self, or_replace: bool) -> Self {
        self.or_replace = or_replace;
        self
    }

    pub fn transient(mut self, transient: bool) -> Self {
        self.transient = transient;
        self
    }

    pub fn if_not_exists(mut self, if_not_exists: bool) -> Self {
        self.if_not_exists = if_not_exists;
        self
    }

    pub fn clone_clause(mut self, clone: Option<ObjectName>) -> Self {
        self.clone = clone;
        self
    }

    pub fn data_retention_time_in_days(mut self, data_retention_time_in_days: Option<u64>) -> Self {
        self.data_retention_time_in_days = data_retention_time_in_days;
        self
    }

    pub fn max_data_extension_time_in_days(
        mut self,
        max_data_extension_time_in_days: Option<u64>,
    ) -> Self {
        self.max_data_extension_time_in_days = max_data_extension_time_in_days;
        self
    }

    pub fn external_volume(mut self, external_volume: Option<String>) -> Self {
        self.external_volume = external_volume;
        self
    }

    pub fn catalog(mut self, catalog: Option<String>) -> Self {
        self.catalog = catalog;
        self
    }

    pub fn replace_invalid_characters(mut self, replace_invalid_characters: Option<bool>) -> Self {
        self.replace_invalid_characters = replace_invalid_characters;
        self
    }

    pub fn default_ddl_collation(mut self, default_ddl_collation: Option<String>) -> Self {
        self.default_ddl_collation = default_ddl_collation;
        self
    }

    pub fn storage_serialization_policy(
        mut self,
        storage_serialization_policy: Option<StorageSerializationPolicy>,
    ) -> Self {
        self.storage_serialization_policy = storage_serialization_policy;
        self
    }

    pub fn comment(mut self, comment: Option<String>) -> Self {
        self.comment = comment;
        self
    }

    pub fn catalog_sync(mut self, catalog_sync: Option<String>) -> Self {
pub fn catalog_sync(mut self, catalog_sync: Option<String>) -> Self {
|
||||||
|
self.catalog_sync = catalog_sync;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn catalog_sync_namespace_mode(
|
||||||
|
mut self,
|
||||||
|
catalog_sync_namespace_mode: Option<CatalogSyncNamespaceMode>,
|
||||||
|
) -> Self {
|
||||||
|
self.catalog_sync_namespace_mode = catalog_sync_namespace_mode;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn catalog_sync_namespace_flatten_delimiter(
|
||||||
|
mut self,
|
||||||
|
catalog_sync_namespace_flatten_delimiter: Option<String>,
|
||||||
|
) -> Self {
|
||||||
|
self.catalog_sync_namespace_flatten_delimiter = catalog_sync_namespace_flatten_delimiter;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_tags(mut self, with_tags: Option<Vec<Tag>>) -> Self {
|
||||||
|
self.with_tags = with_tags;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_contacts(mut self, with_contacts: Option<Vec<ContactEntry>>) -> Self {
|
||||||
|
self.with_contacts = with_contacts;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn build(self) -> Statement {
|
||||||
|
Statement::CreateDatabase {
|
||||||
|
db_name: self.db_name,
|
||||||
|
if_not_exists: self.if_not_exists,
|
||||||
|
managed_location: self.managed_location,
|
||||||
|
location: self.location,
|
||||||
|
or_replace: self.or_replace,
|
||||||
|
transient: self.transient,
|
||||||
|
clone: self.clone,
|
||||||
|
data_retention_time_in_days: self.data_retention_time_in_days,
|
||||||
|
max_data_extension_time_in_days: self.max_data_extension_time_in_days,
|
||||||
|
external_volume: self.external_volume,
|
||||||
|
catalog: self.catalog,
|
||||||
|
replace_invalid_characters: self.replace_invalid_characters,
|
||||||
|
default_ddl_collation: self.default_ddl_collation,
|
||||||
|
storage_serialization_policy: self.storage_serialization_policy,
|
||||||
|
comment: self.comment,
|
||||||
|
catalog_sync: self.catalog_sync,
|
||||||
|
catalog_sync_namespace_mode: self.catalog_sync_namespace_mode,
|
||||||
|
catalog_sync_namespace_flatten_delimiter: self.catalog_sync_namespace_flatten_delimiter,
|
||||||
|
with_tags: self.with_tags,
|
||||||
|
with_contacts: self.with_contacts,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<Statement> for CreateDatabaseBuilder {
|
||||||
|
type Error = ParserError;
|
||||||
|
|
||||||
|
fn try_from(stmt: Statement) -> Result<Self, Self::Error> {
|
||||||
|
match stmt {
|
||||||
|
Statement::CreateDatabase {
|
||||||
|
db_name,
|
||||||
|
if_not_exists,
|
||||||
|
location,
|
||||||
|
managed_location,
|
||||||
|
or_replace,
|
||||||
|
transient,
|
||||||
|
clone,
|
||||||
|
data_retention_time_in_days,
|
||||||
|
max_data_extension_time_in_days,
|
||||||
|
external_volume,
|
||||||
|
catalog,
|
||||||
|
replace_invalid_characters,
|
||||||
|
default_ddl_collation,
|
||||||
|
storage_serialization_policy,
|
||||||
|
comment,
|
||||||
|
catalog_sync,
|
||||||
|
catalog_sync_namespace_mode,
|
||||||
|
catalog_sync_namespace_flatten_delimiter,
|
||||||
|
with_tags,
|
||||||
|
with_contacts,
|
||||||
|
} => Ok(Self {
|
||||||
|
db_name,
|
||||||
|
if_not_exists,
|
||||||
|
location,
|
||||||
|
managed_location,
|
||||||
|
or_replace,
|
||||||
|
transient,
|
||||||
|
clone,
|
||||||
|
data_retention_time_in_days,
|
||||||
|
max_data_extension_time_in_days,
|
||||||
|
external_volume,
|
||||||
|
catalog,
|
||||||
|
replace_invalid_characters,
|
||||||
|
default_ddl_collation,
|
||||||
|
storage_serialization_policy,
|
||||||
|
comment,
|
||||||
|
catalog_sync,
|
||||||
|
catalog_sync_namespace_mode,
|
||||||
|
catalog_sync_namespace_flatten_delimiter,
|
||||||
|
with_tags,
|
||||||
|
with_contacts,
|
||||||
|
}),
|
||||||
|
_ => Err(ParserError::ParserError(format!(
|
||||||
|
"Expected create database statement, but received: {stmt}"
|
||||||
|
))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
|
||||||
|
use crate::ast::{Ident, ObjectName, Statement};
|
||||||
|
use crate::parser::ParserError;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
pub fn test_from_valid_statement() {
|
||||||
|
let builder = CreateDatabaseBuilder::new(ObjectName::from(vec![Ident::new("db_name")]));
|
||||||
|
|
||||||
|
let stmt = builder.clone().build();
|
||||||
|
|
||||||
|
assert_eq!(builder, CreateDatabaseBuilder::try_from(stmt).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
pub fn test_from_invalid_statement() {
|
||||||
|
let stmt = Statement::Commit {
|
||||||
|
chain: false,
|
||||||
|
end: false,
|
||||||
|
modifier: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
CreateDatabaseBuilder::try_from(stmt).unwrap_err(),
|
||||||
|
ParserError::ParserError(
|
||||||
|
"Expected create database statement, but received: COMMIT".to_owned()
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
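A quick illustration of the round-trip contract the builder and its TryFrom impl establish — a minimal sketch, not part of the diff; the statement text and the comment value are invented for the example:

use sqlparser::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // Parse an existing statement, lift it into the builder, tweak one
    // field, and render it back to SQL.
    let sql = "CREATE DATABASE IF NOT EXISTS analytics";
    let stmt = Parser::parse_sql(&GenericDialect {}, sql)
        .unwrap()
        .remove(0);

    let rewritten = CreateDatabaseBuilder::try_from(stmt)
        .unwrap()
        .comment(Some("nightly ETL target".to_string()))
        .build();

    // Display renders the builder's fields back into statement text.
    println!("{rewritten}");
}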
src/ast/helpers/stmt_create_table.rs

@@ -90,7 +90,7 @@ pub struct CreateTableBuilder {
     pub primary_key: Option<Box<Expr>>,
     pub order_by: Option<OneOrManyWithParens<Expr>>,
     pub partition_by: Option<Box<Expr>>,
-    pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
+    pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
     pub clustered_by: Option<ClusteredBy>,
     pub inherits: Option<Vec<ObjectName>>,
     pub strict: bool,
@@ -279,7 +279,7 @@ impl CreateTableBuilder {
         self
     }

-    pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Ident>>>) -> Self {
+    pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Expr>>>) -> Self {
         self.cluster_by = cluster_by;
         self
     }
@@ -383,6 +383,26 @@ impl CreateTableBuilder {
         self
     }

+    /// Returns true if the statement has exactly one source of info on the schema of the new table.
+    /// This is Snowflake-specific; some dialects allow more than one source.
+    pub(crate) fn validate_schema_info(&self) -> bool {
+        let mut sources = 0;
+        if !self.columns.is_empty() {
+            sources += 1;
+        }
+        if self.query.is_some() {
+            sources += 1;
+        }
+        if self.like.is_some() {
+            sources += 1;
+        }
+        if self.clone.is_some() {
+            sources += 1;
+        }
+
+        sources == 1
+    }
+
     pub fn build(self) -> Statement {
         Statement::CreateTable(CreateTable {
             or_replace: self.or_replace,
@@ -542,7 +562,7 @@ impl TryFrom<Statement> for CreateTableBuilder {
 #[derive(Default)]
 pub(crate) struct CreateTableConfiguration {
     pub partition_by: Option<Box<Expr>>,
-    pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
+    pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
     pub inherits: Option<Vec<ObjectName>>,
     pub table_options: CreateTableOptions,
 }
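Since `cluster_by` now carries `Expr` values instead of bare `Ident`s, clustering keys that are expressions should round-trip; a hedged sketch (the table definition is invented):

use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Snowflake allows function calls as clustering keys; with the
    // Vec<Ident> -> Vec<Expr> change this should now parse.
    let sql = "CREATE TABLE t (d VARCHAR, c1 INT) CLUSTER BY (TO_DATE(d), c1)";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    println!("{}", stmts[0]);
}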
932   src/ast/mod.rs   (file diff suppressed because it is too large)

245   src/ast/query.rs
@@ -321,6 +321,11 @@ pub struct Select {
     pub top_before_distinct: bool,
     /// projection expressions
     pub projection: Vec<SelectItem>,
+    /// Excluded columns from the projection expression which are not specified
+    /// directly after a wildcard.
+    ///
+    /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html)
+    pub exclude: Option<ExcludeSelectItem>,
     /// INTO
     pub into: Option<SelectInto>,
     /// FROM
@@ -401,6 +406,10 @@ impl fmt::Display for Select {
             indented_list(f, &self.projection)?;
         }

+        if let Some(exclude) = &self.exclude {
+            write!(f, " {exclude}")?;
+        }
+
         if let Some(ref into) = self.into {
             f.write_str(" ")?;
             into.fmt(f)?;
@@ -1047,7 +1056,7 @@ impl fmt::Display for ConnectBy {
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub struct Setting {
     pub key: Ident,
-    pub value: Value,
+    pub value: Expr,
 }

 impl fmt::Display for Setting {
@@ -1183,7 +1192,7 @@ impl fmt::Display for TableIndexHints {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{} {} ", self.hint_type, self.index_type)?;
         if let Some(for_clause) = &self.for_clause {
-            write!(f, "FOR {} ", for_clause)?;
+            write!(f, "FOR {for_clause} ")?;
         }
         write!(f, "({})", display_comma_separated(&self.index_names))
     }
@@ -1340,11 +1349,12 @@ pub enum TableFactor {
     /// ```
     ///
     /// See <https://docs.snowflake.com/en/sql-reference/constructs/unpivot>.
+    /// See <https://docs.databricks.com/aws/en/sql/language-manual/sql-ref-syntax-qry-select-unpivot>.
     Unpivot {
         table: Box<TableFactor>,
-        value: Ident,
+        value: Expr,
         name: Ident,
-        columns: Vec<Ident>,
+        columns: Vec<ExprWithAlias>,
         null_inclusion: Option<NullInclusion>,
         alias: Option<TableAlias>,
     },
@@ -1459,7 +1469,7 @@ impl fmt::Display for TableSampleQuantity {
         }
         write!(f, "{}", self.value)?;
         if let Some(unit) = &self.unit {
-            write!(f, " {}", unit)?;
+            write!(f, " {unit}")?;
         }
         if self.parenthesized {
             write!(f, ")")?;
@@ -1552,7 +1562,7 @@ impl fmt::Display for TableSampleBucket {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?;
         if let Some(on) = &self.on {
-            write!(f, " ON {}", on)?;
+            write!(f, " ON {on}")?;
         }
         Ok(())
     }
@@ -1561,19 +1571,19 @@ impl fmt::Display for TableSample {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.modifier)?;
         if let Some(name) = &self.name {
-            write!(f, " {}", name)?;
+            write!(f, " {name}")?;
         }
         if let Some(quantity) = &self.quantity {
-            write!(f, " {}", quantity)?;
+            write!(f, " {quantity}")?;
         }
         if let Some(seed) = &self.seed {
-            write!(f, " {}", seed)?;
+            write!(f, " {seed}")?;
         }
         if let Some(bucket) = &self.bucket {
-            write!(f, " ({})", bucket)?;
+            write!(f, " ({bucket})")?;
         }
         if let Some(offset) = &self.offset {
-            write!(f, " OFFSET {}", offset)?;
+            write!(f, " OFFSET {offset}")?;
         }
         Ok(())
     }
@@ -1651,7 +1661,7 @@ impl fmt::Display for RowsPerMatch {
             RowsPerMatch::AllRows(mode) => {
                 write!(f, "ALL ROWS PER MATCH")?;
                 if let Some(mode) = mode {
-                    write!(f, " {}", mode)?;
+                    write!(f, " {mode}")?;
                 }
                 Ok(())
             }
@@ -1777,7 +1787,7 @@ impl fmt::Display for MatchRecognizePattern {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         use MatchRecognizePattern::*;
         match self {
-            Symbol(symbol) => write!(f, "{}", symbol),
+            Symbol(symbol) => write!(f, "{symbol}"),
             Exclude(symbol) => write!(f, "{{- {symbol} -}}"),
             Permute(symbols) => write!(f, "PERMUTE({})", display_comma_separated(symbols)),
             Concat(patterns) => write!(f, "{}", display_separated(patterns, " ")),
@@ -2148,7 +2158,7 @@ impl fmt::Display for TableAliasColumnDef {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.name)?;
         if let Some(ref data_type) = self.data_type {
-            write!(f, " {}", data_type)?;
+            write!(f, " {data_type}")?;
         }
         Ok(())
     }
@@ -2398,7 +2408,7 @@ impl fmt::Display for OrderBy {
                 write!(f, " {}", display_comma_separated(exprs))?;
             }
             OrderByKind::All(all) => {
-                write!(f, " ALL{}", all)?;
+                write!(f, " ALL{all}")?;
             }
         }

@@ -2429,7 +2439,7 @@ impl fmt::Display for OrderByExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}{}", self.expr, self.options)?;
         if let Some(ref with_fill) = self.with_fill {
-            write!(f, " {}", with_fill)?
+            write!(f, " {with_fill}")?
         }
         Ok(())
     }
@@ -2452,13 +2462,13 @@ impl fmt::Display for WithFill {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "WITH FILL")?;
         if let Some(ref from) = self.from {
-            write!(f, " FROM {}", from)?;
+            write!(f, " FROM {from}")?;
         }
         if let Some(ref to) = self.to {
-            write!(f, " TO {}", to)?;
+            write!(f, " TO {to}")?;
         }
        if let Some(ref step) = self.step {
-            write!(f, " STEP {}", step)?;
+            write!(f, " STEP {step}")?;
         }
         Ok(())
     }
@@ -2487,7 +2497,7 @@ impl fmt::Display for InterpolateExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.column)?;
         if let Some(ref expr) = self.expr {
-            write!(f, " AS {}", expr)?;
+            write!(f, " AS {expr}")?;
         }
         Ok(())
     }
@@ -2565,7 +2575,7 @@ impl fmt::Display for LimitClause {
                 Ok(())
             }
             LimitClause::OffsetCommaLimit { offset, limit } => {
-                write!(f, " LIMIT {}, {}", offset, limit)
+                write!(f, " LIMIT {offset}, {limit}")
             }
         }
     }
@@ -2684,6 +2694,79 @@ pub enum PipeOperator {
     /// Syntax: `|> TABLESAMPLE SYSTEM (10 PERCENT)`
     /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#tablesample_pipe_operator>
     TableSample { sample: Box<TableSample> },
+    /// Renames columns in the input table.
+    ///
+    /// Syntax: `|> RENAME old_name AS new_name, ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#rename_pipe_operator>
+    Rename { mappings: Vec<IdentWithAlias> },
+    /// Combines the input table with one or more tables using UNION.
+    ///
+    /// Syntax: `|> UNION [ALL|DISTINCT] (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#union_pipe_operator>
+    Union {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Returns only the rows that are present in both the input table and the specified tables.
+    ///
+    /// Syntax: `|> INTERSECT [DISTINCT] (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#intersect_pipe_operator>
+    Intersect {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Returns only the rows that are present in the input table but not in the specified tables.
+    ///
+    /// Syntax: `|> EXCEPT DISTINCT (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#except_pipe_operator>
+    Except {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Calls a table function or procedure that returns a table.
+    ///
+    /// Syntax: `|> CALL function_name(args) [AS alias]`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#call_pipe_operator>
+    Call {
+        function: Function,
+        alias: Option<Ident>,
+    },
+    /// Pivots data from rows to columns.
+    ///
+    /// Syntax: `|> PIVOT(aggregate_function(column) FOR pivot_column IN (value1, value2, ...)) [AS alias]`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#pivot_pipe_operator>
+    Pivot {
+        aggregate_functions: Vec<ExprWithAlias>,
+        value_column: Vec<Ident>,
+        value_source: PivotValueSource,
+        alias: Option<Ident>,
+    },
+    /// The `UNPIVOT` pipe operator transforms columns into rows.
+    ///
+    /// Syntax:
+    /// ```sql
+    /// |> UNPIVOT(value_column FOR name_column IN (column1, column2, ...)) [alias]
+    /// ```
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#unpivot_pipe_operator>
+    Unpivot {
+        value_column: Ident,
+        name_column: Ident,
+        unpivot_columns: Vec<Ident>,
+        alias: Option<Ident>,
+    },
+    /// Joins the input table with another table.
+    ///
+    /// Syntax: `|> [JOIN_TYPE] JOIN <table> [alias] ON <condition>` or `|> [JOIN_TYPE] JOIN <table> [alias] USING (<columns>)`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#join_pipe_operator>
+    Join(Join),
 }

 impl fmt::Display for PipeOperator {
@@ -2702,12 +2785,12 @@ impl fmt::Display for PipeOperator {
                 write!(f, "DROP {}", display_comma_separated(columns.as_slice()))
             }
             PipeOperator::As { alias } => {
-                write!(f, "AS {}", alias)
+                write!(f, "AS {alias}")
             }
             PipeOperator::Limit { expr, offset } => {
-                write!(f, "LIMIT {}", expr)?;
+                write!(f, "LIMIT {expr}")?;
                 if let Some(offset) = offset {
-                    write!(f, " OFFSET {}", offset)?;
+                    write!(f, " OFFSET {offset}")?;
                 }
                 Ok(())
             }
@@ -2730,17 +2813,97 @@ impl fmt::Display for PipeOperator {
             }

             PipeOperator::Where { expr } => {
-                write!(f, "WHERE {}", expr)
+                write!(f, "WHERE {expr}")
             }
             PipeOperator::OrderBy { exprs } => {
                 write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice()))
             }

             PipeOperator::TableSample { sample } => {
-                write!(f, "{}", sample)
+                write!(f, "{sample}")
             }
+            PipeOperator::Rename { mappings } => {
+                write!(f, "RENAME {}", display_comma_separated(mappings))
+            }
+            PipeOperator::Union {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "UNION", set_quantifier, queries),
+            PipeOperator::Intersect {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "INTERSECT", set_quantifier, queries),
+            PipeOperator::Except {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "EXCEPT", set_quantifier, queries),
+            PipeOperator::Call { function, alias } => {
+                write!(f, "CALL {function}")?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Pivot {
+                aggregate_functions,
+                value_column,
+                value_source,
+                alias,
+            } => {
+                write!(
+                    f,
+                    "PIVOT({} FOR {} IN ({}))",
+                    display_comma_separated(aggregate_functions),
+                    Expr::CompoundIdentifier(value_column.to_vec()),
+                    value_source
+                )?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Unpivot {
+                value_column,
+                name_column,
+                unpivot_columns,
+                alias,
+            } => {
+                write!(
+                    f,
+                    "UNPIVOT({} FOR {} IN ({}))",
+                    value_column,
+                    name_column,
+                    display_comma_separated(unpivot_columns)
+                )?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Join(join) => write!(f, "{join}"),
         }
     }
 }

+impl PipeOperator {
+    /// Helper function to format optional alias for pipe operators
+    fn fmt_optional_alias(f: &mut fmt::Formatter<'_>, alias: &Option<Ident>) -> fmt::Result {
+        if let Some(alias) = alias {
+            write!(f, " AS {alias}")?;
+        }
+        Ok(())
+    }
+
+    /// Helper function to format set operations (UNION, INTERSECT, EXCEPT) with queries
+    fn fmt_set_operation(
+        f: &mut fmt::Formatter<'_>,
+        operation: &str,
+        set_quantifier: &SetQuantifier,
+        queries: &[Query],
+    ) -> fmt::Result {
+        write!(f, "{operation}")?;
+        match set_quantifier {
+            SetQuantifier::None => {}
+            _ => {
+                write!(f, " {set_quantifier}")?;
+            }
+        }
+        write!(f, " ")?;
+        let parenthesized_queries: Vec<String> =
+            queries.iter().map(|query| format!("({query})")).collect();
+        write!(f, "{}", display_comma_separated(&parenthesized_queries))
+    }
+}

 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
@@ -3016,7 +3179,7 @@ pub enum FormatClause {
 impl fmt::Display for FormatClause {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            FormatClause::Identifier(ident) => write!(f, "FORMAT {}", ident),
+            FormatClause::Identifier(ident) => write!(f, "FORMAT {ident}"),
             FormatClause::Null => write!(f, "FORMAT NULL"),
         }
     }
@@ -3078,9 +3241,9 @@ impl fmt::Display for ForClause {
                 without_array_wrapper,
             } => {
                 write!(f, "FOR JSON ")?;
-                write!(f, "{}", for_json)?;
+                write!(f, "{for_json}")?;
                 if let Some(root) = root {
-                    write!(f, ", ROOT('{}')", root)?;
+                    write!(f, ", ROOT('{root}')")?;
                 }
                 if *include_null_values {
                     write!(f, ", INCLUDE_NULL_VALUES")?;
@@ -3098,7 +3261,7 @@ impl fmt::Display for ForClause {
                 r#type,
             } => {
                 write!(f, "FOR XML ")?;
-                write!(f, "{}", for_xml)?;
+                write!(f, "{for_xml}")?;
                 if *binary_base64 {
                     write!(f, ", BINARY BASE64")?;
                 }
@@ -3106,7 +3269,7 @@ impl fmt::Display for ForClause {
                     write!(f, ", TYPE")?;
                 }
                 if let Some(root) = root {
-                    write!(f, ", ROOT('{}')", root)?;
+                    write!(f, ", ROOT('{root}')")?;
                 }
                 if *elements {
                     write!(f, ", ELEMENTS")?;
@@ -3133,7 +3296,7 @@ impl fmt::Display for ForXml {
             ForXml::Raw(root) => {
                 write!(f, "RAW")?;
                 if let Some(root) = root {
-                    write!(f, "('{}')", root)?;
+                    write!(f, "('{root}')")?;
                 }
                 Ok(())
             }
@@ -3142,7 +3305,7 @@ impl fmt::Display for ForXml {
             ForXml::Path(root) => {
                 write!(f, "PATH")?;
                 if let Some(root) = root {
-                    write!(f, "('{}')", root)?;
+                    write!(f, "('{root}')")?;
                 }
                 Ok(())
             }
@@ -3205,7 +3368,7 @@ impl fmt::Display for JsonTableColumn {
             JsonTableColumn::Named(json_table_named_column) => {
                 write!(f, "{json_table_named_column}")
             }
-            JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident),
+            JsonTableColumn::ForOrdinality(ident) => write!(f, "{ident} FOR ORDINALITY"),
             JsonTableColumn::Nested(json_table_nested_column) => {
                 write!(f, "{json_table_nested_column}")
             }
@@ -3271,10 +3434,10 @@ impl fmt::Display for JsonTableNamedColumn {
             self.path
         )?;
         if let Some(on_empty) = &self.on_empty {
-            write!(f, " {} ON EMPTY", on_empty)?;
+            write!(f, " {on_empty} ON EMPTY")?;
         }
         if let Some(on_error) = &self.on_error {
-            write!(f, " {} ON ERROR", on_error)?;
+            write!(f, " {on_error} ON ERROR")?;
         }
         Ok(())
     }
@@ -3296,7 +3459,7 @@ impl fmt::Display for JsonTableColumnErrorHandling {
         match self {
             JsonTableColumnErrorHandling::Null => write!(f, "NULL"),
             JsonTableColumnErrorHandling::Default(json_string) => {
-                write!(f, "DEFAULT {}", json_string)
+                write!(f, "DEFAULT {json_string}")
             }
             JsonTableColumnErrorHandling::Error => write!(f, "ERROR"),
         }
@@ -3429,12 +3592,12 @@ impl fmt::Display for XmlTableColumn {
                 default,
                 nullable,
             } => {
-                write!(f, " {}", r#type)?;
+                write!(f, " {type}")?;
                 if let Some(p) = path {
-                    write!(f, " PATH {}", p)?;
+                    write!(f, " PATH {p}")?;
                 }
                 if let Some(d) = default {
-                    write!(f, " DEFAULT {}", d)?;
+                    write!(f, " DEFAULT {d}")?;
                 }
                 if !*nullable {
                     write!(f, " NOT NULL")?;
@@ -3465,7 +3628,7 @@ impl fmt::Display for XmlPassingArgument {
         }
         write!(f, "{}", self.expr)?;
         if let Some(alias) = &self.alias {
-            write!(f, " AS {}", alias)?;
+            write!(f, " AS {alias}")?;
         }
         Ok(())
     }
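The new `PipeOperator` variants surface through ordinary parsing; a small sketch, assuming BigQuery's pipe-operator support shown later in this diff (the query text is invented):

use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each `|>` stage becomes one PipeOperator node in the query AST,
    // and Display writes the pipe syntax back out.
    let sql = "SELECT * FROM orders |> WHERE amount > 100 |> RENAME amount AS total";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    println!("{}", stmts[0]);
}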
117   src/ast/spans.rs
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.

-use crate::ast::query::SelectItemQualifiedWildcardKind;
+use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions, ExportData, TypedString};
 use core::iter;

 use crate::tokenizer::Span;
@@ -28,16 +28,17 @@ use super::{
     ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
     Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
     Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
-    FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate,
-    InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView,
-    LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, NamedWindowDefinition,
-    ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OpenStatement,
-    OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, Query,
-    RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, ReplaceSelectElement,
-    ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript,
-    SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject,
-    TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef,
-    WhileStatement, WildcardAdditionalOptions, With, WithFill,
+    FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert,
+    Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem,
+    LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList,
+    NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction,
+    OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource,
+    ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction,
+    RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem,
+    SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef,
+    TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins,
+    UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement,
+    WildcardAdditionalOptions, With, WithFill,
 };

 /// Given an iterator of spans, return the [Span::union] of all spans.
@@ -399,6 +400,7 @@ impl Spanned for Statement {
                 if_not_exists: _,
                 temporary: _,
                 to,
+                name_before_not_exists: _,
                 params: _,
             } => union_spans(
                 core::iter::once(name.span())
@@ -422,6 +424,7 @@ impl Spanned for Statement {
             Statement::CreateIndex(create_index) => create_index.span(),
             Statement::CreateRole { .. } => Span::empty(),
             Statement::CreateSecret { .. } => Span::empty(),
+            Statement::CreateServer { .. } => Span::empty(),
             Statement::CreateConnector { .. } => Span::empty(),
             Statement::AlterTable {
                 name,
@@ -475,6 +478,7 @@ impl Spanned for Statement {
             Statement::ShowColumns { .. } => Span::empty(),
             Statement::ShowTables { .. } => Span::empty(),
             Statement::ShowCollation { .. } => Span::empty(),
+            Statement::ShowCharset { .. } => Span::empty(),
             Statement::Use(u) => u.span(),
             Statement::StartTransaction { .. } => Span::empty(),
             Statement::Comment { .. } => Span::empty(),
@@ -529,6 +533,18 @@ impl Spanned for Statement {
             Statement::Print { .. } => Span::empty(),
             Statement::Return { .. } => Span::empty(),
             Statement::List(..) | Statement::Remove(..) => Span::empty(),
+            Statement::ExportData(ExportData {
+                options,
+                query,
+                connection,
+            }) => union_spans(
+                options
+                    .iter()
+                    .map(|i| i.span())
+                    .chain(core::iter::once(query.span()))
+                    .chain(connection.iter().map(|i| i.span())),
+            ),
+            Statement::CreateUser(..) => Span::empty(),
         }
     }
 }
@@ -650,7 +666,7 @@ impl Spanned for TableConstraint {
                 name.iter()
                     .map(|i| i.span)
                     .chain(index_name.iter().map(|i| i.span))
-                    .chain(columns.iter().map(|i| i.span))
+                    .chain(columns.iter().map(|i| i.span()))
                     .chain(characteristics.iter().map(|i| i.span())),
             ),
             TableConstraint::PrimaryKey {
@@ -664,7 +680,7 @@ impl Spanned for TableConstraint {
                 name.iter()
                     .map(|i| i.span)
                     .chain(index_name.iter().map(|i| i.span))
-                    .chain(columns.iter().map(|i| i.span))
+                    .chain(columns.iter().map(|i| i.span()))
                     .chain(characteristics.iter().map(|i| i.span())),
             ),
             TableConstraint::ForeignKey {
@@ -700,7 +716,7 @@ impl Spanned for TableConstraint {
             } => union_spans(
                 name.iter()
                     .map(|i| i.span)
-                    .chain(columns.iter().map(|i| i.span)),
+                    .chain(columns.iter().map(|i| i.span())),
             ),
             TableConstraint::FulltextOrSpatial {
                 fulltext: _,
@@ -711,7 +727,7 @@ impl Spanned for TableConstraint {
                 opt_index_name
                     .iter()
                     .map(|i| i.span)
-                    .chain(columns.iter().map(|i| i.span)),
+                    .chain(columns.iter().map(|i| i.span())),
             ),
         }
     }
@@ -745,6 +761,12 @@ impl Spanned for CreateIndex {
     }
 }

+impl Spanned for IndexColumn {
+    fn span(&self) -> Span {
+        self.column.span()
+    }
+}
+
 impl Spanned for CaseStatement {
     fn span(&self) -> Span {
         let CaseStatement {
@@ -917,6 +939,7 @@ impl Spanned for AlterColumnOperation {
             AlterColumnOperation::SetDataType {
                 data_type: _,
                 using,
+                had_set: _,
             } => using.as_ref().map_or(Span::empty(), |u| u.span()),
             AlterColumnOperation::AddGenerated { .. } => Span::empty(),
         }
@@ -984,10 +1007,13 @@ impl Spanned for ViewColumnDef {
             options,
         } = self;

-        union_spans(
-            core::iter::once(name.span)
-                .chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))),
-        )
+        name.span.union_opt(&options.as_ref().map(|o| o.span()))
+    }
+}
+
+impl Spanned for ColumnOptions {
+    fn span(&self) -> Span {
+        union_spans(self.as_slice().iter().map(|i| i.span()))
     }
 }
@@ -1048,7 +1074,9 @@ impl Spanned for CreateTableOptions {
         match self {
             CreateTableOptions::None => Span::empty(),
             CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
-            CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())),
+            CreateTableOptions::Options(vec) => {
+                union_spans(vec.as_slice().iter().map(|i| i.span()))
+            }
             CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
             CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
         }
@@ -1062,7 +1090,10 @@
 impl Spanned for AlterTableOperation {
     fn span(&self) -> Span {
         match self {
-            AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(),
+            AlterTableOperation::AddConstraint {
+                constraint,
+                not_valid: _,
+            } => constraint.span(),
             AlterTableOperation::AddColumn {
                 column_keyword: _,
                 if_not_exists: _,
@@ -1095,10 +1126,10 @@ impl Spanned for AlterTableOperation {
             } => name.span,
             AlterTableOperation::DropColumn {
                 has_column_keyword: _,
-                column_name,
+                column_names,
                 if_exists: _,
                 drop_behavior: _,
-            } => column_name.span,
+            } => union_spans(column_names.iter().map(|i| i.span)),
             AlterTableOperation::AttachPartition { partition } => partition.span(),
             AlterTableOperation::DetachPartition { partition } => partition.span(),
             AlterTableOperation::FreezePartition {
@@ -1183,6 +1214,10 @@ impl Spanned for AlterTableOperation {
             AlterTableOperation::AutoIncrement { value, .. } => value.span(),
             AlterTableOperation::Lock { .. } => Span::empty(),
             AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
+            AlterTableOperation::ValidateConstraint { name } => name.span,
+            AlterTableOperation::SetOptionsParens { options } => {
+                union_spans(options.iter().map(|i| i.span()))
+            }
         }
     }
 }
@@ -1398,7 +1433,6 @@ impl Spanned for AssignmentTarget {
 /// f.e. `IS NULL <expr>` reports as `<expr>::span`.
 ///
 /// Missing spans:
-/// - [Expr::TypedString] # missing span for data_type
 /// - [Expr::MatchAgainst] # MySQL specific
 /// - [Expr::RLike] # MySQL specific
 /// - [Expr::Struct] # BigQuery specific
@@ -1493,7 +1527,7 @@ impl Spanned for Expr {
                 .union(&union_spans(collation.0.iter().map(|i| i.span()))),
             Expr::Nested(expr) => expr.span(),
             Expr::Value(value) => value.span(),
-            Expr::TypedString { value, .. } => value.span(),
+            Expr::TypedString(TypedString { value, .. }) => value.span(),
             Expr::Function(function) => function.span(),
             Expr::GroupingSets(vec) => {
                 union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span())))
@@ -1607,6 +1641,7 @@ impl Spanned for Expr {
             Expr::OuterJoin(expr) => expr.span(),
             Expr::Prior(expr) => expr.span(),
             Expr::Lambda(_) => Span::empty(),
+            Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()),
         }
     }
 }
@@ -1653,6 +1688,10 @@ impl Spanned for ObjectNamePart {
     fn span(&self) -> Span {
         match self {
             ObjectNamePart::Identifier(ident) => ident.span,
+            ObjectNamePart::Function(func) => func
+                .name
+                .span
+                .union(&union_spans(func.args.iter().map(|i| i.span()))),
         }
     }
 }
@@ -1963,9 +2002,9 @@ impl Spanned for TableFactor {
                 alias,
             } => union_spans(
                 core::iter::once(table.span())
-                    .chain(core::iter::once(value.span))
+                    .chain(core::iter::once(value.span()))
                     .chain(core::iter::once(name.span))
-                    .chain(columns.iter().map(|i| i.span))
+                    .chain(columns.iter().map(|ilist| ilist.span()))
                     .chain(alias.as_ref().map(|alias| alias.span())),
             ),
             TableFactor::MatchRecognize {
@@ -2198,6 +2237,7 @@ impl Spanned for Select {
             distinct: _, // todo
             top: _,      // todo, mysql specific
             projection,
+            exclude: _,
             into,
             from,
             lateral_views,
@@ -2485,4 +2525,27 @@ pub mod tests {
             "CASE 1 WHEN 2 THEN 3 ELSE 4 END"
         );
     }

+    #[test]
+    fn test_placeholder_span() {
+        let sql = "\nSELECT\n :fooBar";
+        let r = Parser::parse_sql(&GenericDialect, sql).unwrap();
+        assert_eq!(1, r.len());
+        match &r[0] {
+            Statement::Query(q) => {
+                let col = &q.body.as_select().unwrap().projection[0];
+                match col {
+                    SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan {
+                        value: Value::Placeholder(s),
+                        span,
+                    })) => {
+                        assert_eq!(":fooBar", s);
+                        assert_eq!(&Span::new((3, 3).into(), (3, 10).into()), span);
+                    }
+                    _ => panic!("expected unnamed expression; got {col:?}"),
+                }
+            }
+            stmt => panic!("expected query; got {stmt:?}"),
+        }
+    }
 }
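These impls feed the public `Spanned` trait; a minimal sketch of how callers consume it, assuming the re-export at `sqlparser::ast::Spanned`:

use sqlparser::ast::Spanned;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT a FROM t WHERE a > 1";
    let stmt = &Parser::parse_sql(&GenericDialect {}, sql).unwrap()[0];
    // span() unions the spans of all child nodes, yielding the
    // line/column range the whole statement covers in the source text.
    let span = stmt.span();
    println!("statement spans {:?}..{:?}", span.start, span.end);
}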
src/ast/value.rs

@@ -116,7 +116,6 @@ impl From<ValueWithSpan> for Value
     derive(Visit, VisitMut),
     visit(with = "visit_value")
 )]
-
 pub enum Value {
     /// Numeric literal
     #[cfg(not(feature = "bigdecimal"))]
@@ -551,16 +550,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
                 write!(f, r#"\\"#)?;
             }
             x if x.is_ascii() => {
-                write!(f, "{}", c)?;
+                write!(f, "{c}")?;
             }
             _ => {
                 let codepoint = c as u32;
                 // if the character fits in 32 bits, we can use the \XXXX format
                 // otherwise, we need to use the \+XXXXXX format
                 if codepoint <= 0xFFFF {
-                    write!(f, "\\{:04X}", codepoint)?;
+                    write!(f, "\\{codepoint:04X}")?;
                 } else {
-                    write!(f, "\\+{:06X}", codepoint)?;
+                    write!(f, "\\+{codepoint:06X}")?;
                 }
             }
         }
@@ -926,10 +926,10 @@ mod tests {
     #[test]
     fn overflow() {
         let cond = (0..1000)
-            .map(|n| format!("X = {}", n))
+            .map(|n| format!("X = {n}"))
             .collect::<Vec<_>>()
             .join(" OR ");
-        let sql = format!("SELECT x where {0}", cond);
+        let sql = format!("SELECT x where {cond}");

         let dialect = GenericDialect {};
         let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap();
src/dialect/bigquery.rs

@@ -19,6 +19,7 @@ use crate::ast::Statement;
 use crate::dialect::Dialect;
 use crate::keywords::Keyword;
 use crate::parser::{Parser, ParserError};
+use crate::tokenizer::Token;

 /// These keywords are disallowed as column identifiers. Such that
 /// `SELECT 5 AS <col> FROM T` is rejected by BigQuery.
@@ -46,7 +47,18 @@ pub struct BigQueryDialect;

 impl Dialect for BigQueryDialect {
     fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
-        self.maybe_parse_statement(parser)
+        if parser.parse_keyword(Keyword::BEGIN) {
+            if parser.peek_keyword(Keyword::TRANSACTION)
+                || parser.peek_token_ref().token == Token::SemiColon
+                || parser.peek_token_ref().token == Token::EOF
+            {
+                parser.prev_token();
+                return None;
+            }
+            return Some(parser.parse_begin_exception_end());
+        }
+
+        None
     }

     /// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
@@ -140,49 +152,8 @@ impl Dialect for BigQueryDialect {
     fn supports_pipe_operator(&self) -> bool {
         true
     }
-}
-
-impl BigQueryDialect {
-    fn maybe_parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
-        if parser.peek_keyword(Keyword::BEGIN) {
-            return Some(self.parse_begin(parser));
-        }
-        None
-    }
-
-    /// Parse a `BEGIN` statement.
-    /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
-    fn parse_begin(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
-        parser.expect_keyword(Keyword::BEGIN)?;
-
-        let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
-
-        let has_exception_when_clause = parser.parse_keywords(&[
-            Keyword::EXCEPTION,
-            Keyword::WHEN,
-            Keyword::ERROR,
-            Keyword::THEN,
-        ]);
-        let exception_statements = if has_exception_when_clause {
-            if !parser.peek_keyword(Keyword::END) {
-                Some(parser.parse_statement_list(&[Keyword::END])?)
-            } else {
-                Some(Default::default())
-            }
-        } else {
-            None
-        };
-
-        parser.expect_keyword(Keyword::END)?;
-
-        Ok(Statement::StartTransaction {
-            begin: true,
-            statements,
-            exception_statements,
-            has_end_keyword: true,
-            transaction: None,
-            modifier: None,
-            modes: Default::default(),
-        })
-    }
+
+    fn supports_create_table_multi_schema_info_sources(&self) -> bool {
+        true
+    }
 }
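The rewritten hook distinguishes BigQuery's procedural `BEGIN ... END` block from `BEGIN [TRANSACTION]`; a hedged sketch of the observable behavior:

use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = BigQueryDialect {};
    // A bare BEGIN (or BEGIN TRANSACTION) falls through to the generic
    // transaction parser, because the hook backtracks and returns None.
    Parser::parse_sql(&dialect, "BEGIN").unwrap();
    // BEGIN followed by statements is parsed as a procedural
    // BEGIN ... EXCEPTION WHEN ERROR THEN ... END block.
    Parser::parse_sql(
        &dialect,
        "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END",
    )
    .unwrap();
}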
@@ -65,7 +65,7 @@ impl Dialect for DuckDbDialect {
         true
     }
 
-    /// See <https://duckdb.org/docs/sql/functions/lambda.html>
+    /// See <https://duckdb.org/docs/stable/sql/functions/lambda>
     fn supports_lambda_functions(&self) -> bool {
         true
     }
@@ -94,4 +94,14 @@ impl Dialect for DuckDbDialect {
     fn supports_order_by_all(&self) -> bool {
         true
    }
+
+    fn supports_select_wildcard_exclude(&self) -> bool {
+        true
+    }
+
+    /// DuckDB supports `NOTNULL` as an alias for `IS NOT NULL`,
+    /// see DuckDB Comparisons <https://duckdb.org/docs/stable/sql/expressions/comparison_operators#between-and-is-not-null>
+    fn supports_notnull_operator(&self) -> bool {
+        true
+    }
 }
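A hedged sketch of the two DuckDB additions, wildcard `EXCLUDE` and the postfix `NOTNULL` comparison; the table and column names are invented:

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    // EXCLUDE trims columns from the wildcard; NOTNULL reads as IS NOT NULL.
    let sql = "SELECT * EXCLUDE (secret) FROM users WHERE email NOTNULL";
    assert!(Parser::parse_sql(&DuckDbDialect {}, sql).is_ok());
}
```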
@@ -52,6 +52,10 @@ impl Dialect for GenericDialect {
         true
     }
 
+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        true
+    }
+
     fn supports_connect_by(&self) -> bool {
         true
     }
@@ -108,6 +112,14 @@ impl Dialect for GenericDialect {
         true
     }
 
+    fn supports_from_first_select(&self) -> bool {
+        true
+    }
+
+    fn supports_projection_trailing_commas(&self) -> bool {
+        true
+    }
+
     fn supports_asc_desc_in_column_definition(&self) -> bool {
         true
     }
@@ -167,4 +179,16 @@ impl Dialect for GenericDialect {
     fn supports_filter_during_aggregation(&self) -> bool {
         true
     }
+
+    fn supports_select_wildcard_exclude(&self) -> bool {
+        true
+    }
+
+    fn supports_data_type_signed_suffix(&self) -> bool {
+        true
+    }
+
+    fn supports_interval_options(&self) -> bool {
+        true
+    }
 }
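The SQL snippets below come straight from the new trait documentation further down; this sketch only checks that `GenericDialect` now accepts them (expected behavior under this revision, not a test from the patch):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = GenericDialect {};
    // Optional SIGNED suffix on integer types, canonicalized to INT(20).
    assert!(Parser::parse_sql(&dialect, "CREATE TABLE t (i INT(20) SIGNED)").is_ok());
    // Postgres-style INTERVAL field and precision options.
    assert!(Parser::parse_sql(&dialect, "SELECT '1 second'::INTERVAL HOUR TO SECOND(3)").is_ok());
    // Trailing comma in the projection list.
    assert!(Parser::parse_sql(&dialect, "SELECT a, b, FROM t").is_ok());
}
```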
@@ -49,7 +49,7 @@ pub use self::postgresql::PostgreSqlDialect;
 pub use self::redshift::RedshiftSqlDialect;
 pub use self::snowflake::SnowflakeDialect;
 pub use self::sqlite::SQLiteDialect;
-use crate::ast::{ColumnOption, Expr, GranteesType, Statement};
+use crate::ast::{ColumnOption, Expr, GranteesType, Ident, ObjectNamePart, Statement};
 pub use crate::keywords;
 use crate::keywords::Keyword;
 use crate::parser::{Parser, ParserError};
@@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
         false
     }
 
+    /// Indicates whether the dialect supports left-associative join parsing
+    /// by default when parentheses are omitted in nested joins.
+    ///
+    /// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
+    /// so a query like:
+    ///
+    /// ```sql
+    /// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
+    /// ```
+    /// is interpreted as:
+    /// ```sql
+    /// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
+    /// ```
+    /// and internally represented as a **flat list** of joins.
+    ///
+    /// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
+    /// precedence and interpret the same query as:
+    /// ```sql
+    /// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
+    /// ```
+    /// which results in a **nested join** structure in the AST.
+    ///
+    /// If this method returns `false`, the parser must build nested join trees
+    /// even in the absence of parentheses to reflect the correct associativity.
+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        true
+    }
+
     /// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
     fn supports_outer_join_operator(&self) -> bool {
         false
@@ -542,6 +570,33 @@ pub trait Dialect: Debug + Any {
         false
     }
 
+    /// Returns true if the dialect supports an exclude option
+    /// following a wildcard in the projection section. For example:
+    /// `SELECT * EXCLUDE col1 FROM tbl`.
+    ///
+    /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html)
+    /// [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/select)
+    fn supports_select_wildcard_exclude(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports an exclude option
+    /// as the last item in the projection section, not necessarily
+    /// after a wildcard. For example:
+    /// `SELECT *, c1, c2 EXCLUDE c3 FROM tbl`
+    ///
+    /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html)
+    fn supports_select_exclude(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports specifying multiple options
+    /// in a `CREATE TABLE` statement for the structure of the new table. For example:
+    /// `CREATE TABLE t (a INT, b INT) AS SELECT 1 AS b, 2 AS a`
+    fn supports_create_table_multi_schema_info_sources(&self) -> bool {
+        false
+    }
+
     /// Dialect-specific infix parser override
     ///
     /// This method is called to parse the next infix expression.
@@ -587,7 +642,7 @@ pub trait Dialect: Debug + Any {
         }
 
         let token = parser.peek_token();
-        debug!("get_next_precedence_full() {:?}", token);
+        debug!("get_next_precedence_full() {token:?}");
         match token.token {
             Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)),
             Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)),
@@ -621,8 +676,17 @@ pub trait Dialect: Debug + Any {
                 Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
                 Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
                 Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
+                Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
+                Token::Word(w)
+                    if w.keyword == Keyword::NULL && !parser.in_column_definition_state() =>
+                {
+                    Ok(p!(Is))
+                }
                 _ => Ok(self.prec_unknown()),
             },
+            Token::Word(w) if w.keyword == Keyword::NOTNULL && self.supports_notnull_operator() => {
+                Ok(p!(Is))
+            }
             Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)),
             Token::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)),
             Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)),
@@ -633,6 +697,7 @@ pub trait Dialect: Debug + Any {
             Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
             Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
             Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
+            Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
             Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)),
             Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)),
             Token::Period => Ok(p!(Period)),
@@ -898,12 +963,6 @@ pub trait Dialect: Debug + Any {
         keywords::RESERVED_FOR_IDENTIFIER.contains(&kw)
     }
 
-    /// Returns reserved keywords when looking to parse a `TableFactor`.
-    /// See [Self::supports_from_trailing_commas]
-    fn get_reserved_keywords_for_table_factor(&self) -> &[Keyword] {
-        keywords::RESERVED_FOR_TABLE_FACTOR
-    }
-
     /// Returns reserved keywords that may prefix a select item expression
     /// e.g. `SELECT CONNECT_BY_ROOT name FROM Tbl2` (Snowflake)
     fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
@@ -962,11 +1021,23 @@ pub trait Dialect: Debug + Any {
         explicit || self.is_column_alias(kw, parser)
     }
 
+    /// Returns true if the specified keyword should be parsed as a table factor identifier.
+    /// See [keywords::RESERVED_FOR_TABLE_FACTOR]
+    fn is_table_factor(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
+        !keywords::RESERVED_FOR_TABLE_FACTOR.contains(kw)
+    }
+
+    /// Returns true if the specified keyword should be parsed as a table factor alias.
+    /// See [keywords::RESERVED_FOR_TABLE_ALIAS]
+    fn is_table_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
+        !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw)
+    }
+
     /// Returns true if the specified keyword should be parsed as a table factor alias.
     /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided
     /// to enable looking ahead if needed.
-    fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
-        explicit || !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw)
+    fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
+        explicit || self.is_table_alias(kw, parser)
     }
 
     /// Returns true if this dialect supports querying historical table data
@@ -1028,6 +1099,70 @@ pub trait Dialect: Debug + Any {
     fn supports_set_names(&self) -> bool {
         false
     }
+
+    fn supports_space_separated_column_options(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement.
+    /// Example:
+    /// ```sql
+    /// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE <type> USING <exp>
+    /// ```
+    fn supports_alter_column_type_using(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports `ALTER TABLE tbl DROP COLUMN c1, ..., cn`
+    fn supports_comma_separated_drop_column_list(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect considers the specified ident as a function
+    /// that returns an identifier. Typically used to generate identifiers
+    /// programmatically.
+    ///
+    /// - [Snowflake](https://docs.snowflake.com/en/sql-reference/identifier-literal)
+    fn is_identifier_generating_function_name(
+        &self,
+        _ident: &Ident,
+        _name_parts: &[ObjectNamePart],
+    ) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports the `x NOTNULL`
+    /// operator expression.
+    fn supports_notnull_operator(&self) -> bool {
+        false
+    }
+
+    /// Returns true if this dialect allows an optional `SIGNED` suffix after integer data types.
+    ///
+    /// Example:
+    /// ```sql
+    /// CREATE TABLE t (i INT(20) SIGNED);
+    /// ```
+    ///
+    /// Note that this is canonicalized to `INT(20)`.
+    fn supports_data_type_signed_suffix(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports the `INTERVAL` data type with [Postgres]-style options.
+    ///
+    /// Examples:
+    /// ```sql
+    /// CREATE TABLE t (i INTERVAL YEAR TO MONTH);
+    /// SELECT '1 second'::INTERVAL HOUR TO SECOND(3);
+    /// ```
+    ///
+    /// See [`crate::ast::DataType::Interval`] and [`crate::ast::IntervalFields`].
+    ///
+    /// [Postgres]: https://www.postgresql.org/docs/17/datatype-datetime.html
+    fn supports_interval_options(&self) -> bool {
+        false
+    }
 }
 
 /// This represents the operators for which precedence must be defined
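Downstream crates pick these capabilities up by overriding the new default methods. A minimal sketch; `MyDialect` is hypothetical and not part of the crate:

```rust
use sqlparser::dialect::Dialect;

#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    // The only two required methods of the trait.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_alphabetic() || ch == '_'
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_alphanumeric() || ch == '_'
    }

    // Build nested join trees when parentheses are omitted, Snowflake-style.
    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }

    // Accept `SELECT * EXCLUDE col1 FROM tbl`.
    fn supports_select_wildcard_exclude(&self) -> bool {
        true
    }
}
```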
@@ -43,15 +43,19 @@ impl Dialect for MySqlDialect {
         // See https://dev.mysql.com/doc/refman/8.0/en/identifiers.html.
         // Identifiers which begin with a digit are recognized while tokenizing numbers,
         // so they can be distinguished from exponent numeric literals.
+        // MySQL also accepts non-ASCII UTF-8 characters.
         ch.is_alphabetic()
             || ch == '_'
             || ch == '$'
             || ch == '@'
             || ('\u{0080}'..='\u{ffff}').contains(&ch)
+            || !ch.is_ascii()
     }
 
     fn is_identifier_part(&self, ch: char) -> bool {
-        self.is_identifier_start(ch) || ch.is_ascii_digit()
+        self.is_identifier_start(ch) || ch.is_ascii_digit() ||
+        // MySQL implements Unicode characters in identifiers.
+        !ch.is_ascii()
     }
 
     fn is_delimited_identifier_start(&self, ch: char) -> bool {
@@ -150,6 +154,10 @@ impl Dialect for MySqlDialect {
     fn supports_comma_separated_set_assignments(&self) -> bool {
         true
     }
+
+    fn supports_data_type_signed_suffix(&self) -> bool {
+        true
+    }
 }
 
 /// `LOCK TABLES`
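A short sketch of the relaxed MySQL identifier rules; the identifiers are illustrative and this reflects the expected behavior under this revision:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Unquoted identifiers may now contain (and begin with) any non-ASCII
    // character, matching MySQL's permissive identifier rules.
    let sql = "SELECT 名前, ünite FROM 顧客";
    assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_ok());
}
```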
@@ -65,14 +65,15 @@ impl Dialect for PostgreSqlDialect {
     }
 
     fn is_identifier_start(&self, ch: char) -> bool {
-        // See https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
-        // We don't yet support identifiers beginning with "letters with
-        // diacritical marks"
-        ch.is_alphabetic() || ch == '_'
+        ch.is_alphabetic() || ch == '_' ||
+        // PostgreSQL implements Unicode characters in identifiers.
+        !ch.is_ascii()
     }
 
     fn is_identifier_part(&self, ch: char) -> bool {
-        ch.is_alphabetic() || ch.is_ascii_digit() || ch == '$' || ch == '_'
+        ch.is_alphabetic() || ch.is_ascii_digit() || ch == '$' || ch == '_' ||
+        // PostgreSQL implements Unicode characters in identifiers.
+        !ch.is_ascii()
     }
 
     fn supports_unicode_string_literal(&self) -> bool {
@@ -104,7 +105,7 @@ impl Dialect for PostgreSqlDialect {
 
     fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
         let token = parser.peek_token();
-        debug!("get_next_precedence() {:?}", token);
+        debug!("get_next_precedence() {token:?}");
 
         // we only return some custom value here when the behaviour (not merely the numeric value) differs
         // from the default implementation
@@ -258,4 +259,21 @@ impl Dialect for PostgreSqlDialect {
     fn supports_set_names(&self) -> bool {
         true
     }
+
+    fn supports_alter_column_type_using(&self) -> bool {
+        true
+    }
+
+    /// Postgres supports `NOTNULL` as an alias for `IS NOT NULL`.
+    /// See: <https://www.postgresql.org/docs/17/functions-comparison.html>
+    fn supports_notnull_operator(&self) -> bool {
+        true
+    }
+
+    /// [Postgres] supports optional field and precision options for the `INTERVAL` data type.
+    ///
+    /// [Postgres]: https://www.postgresql.org/docs/17/datatype-datetime.html
+    fn supports_interval_options(&self) -> bool {
+        true
+    }
 }
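The three PostgreSQL additions in one hedged sketch; the statements mirror the examples in the new doc comments, and the expected results assume this revision:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = PostgreSqlDialect {};
    // `c NOTNULL` now parses as an IS NOT NULL test.
    assert!(Parser::parse_sql(&dialect, "SELECT * FROM t WHERE c NOTNULL").is_ok());
    // ALTER COLUMN ... SET DATA TYPE ... USING <expr> is accepted.
    let alter = "ALTER TABLE t ALTER COLUMN c SET DATA TYPE BIGINT USING c::BIGINT";
    assert!(Parser::parse_sql(&dialect, alter).is_ok());
    // INTERVAL field/precision options parse as part of the data type.
    assert!(Parser::parse_sql(&dialect, "SELECT '1 second'::INTERVAL HOUR TO SECOND(3)").is_ok());
}
```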
@@ -80,12 +80,14 @@ impl Dialect for RedshiftSqlDialect {
     }
 
     fn is_identifier_start(&self, ch: char) -> bool {
-        // Extends Postgres dialect with sharp
+        // UTF-8 multibyte characters are supported in identifiers via the PostgreSqlDialect.
+        // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
         PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
     }
 
     fn is_identifier_part(&self, ch: char) -> bool {
-        // Extends Postgres dialect with sharp
+        // UTF-8 multibyte characters are supported in identifiers via the PostgreSqlDialect.
+        // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
         PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
     }
 
@@ -129,4 +131,12 @@ impl Dialect for RedshiftSqlDialect {
     fn supports_string_literal_backslash_escape(&self) -> bool {
         true
     }
+
+    fn supports_select_wildcard_exclude(&self) -> bool {
+        true
+    }
+
+    fn supports_select_exclude(&self) -> bool {
+        true
+    }
 }
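Both EXCLUDE flavors that Redshift now opts into, reusing the examples from the trait documentation added above (a sketch, not a test from the patch):

```rust
use sqlparser::dialect::RedshiftSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = RedshiftSqlDialect {};
    // EXCLUDE directly after the wildcard.
    assert!(Parser::parse_sql(&dialect, "SELECT * EXCLUDE col1 FROM tbl").is_ok());
    // EXCLUDE as the last item of an explicit projection.
    assert!(Parser::parse_sql(&dialect, "SELECT *, c1, c2 EXCLUDE c3 FROM tbl").is_ok());
}
```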
@@ -17,21 +17,25 @@
 
 #[cfg(not(feature = "std"))]
 use crate::alloc::string::ToString;
-use crate::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType, KeyValueOptions};
+use crate::ast::helpers::key_value_options::{
+    KeyValueOption, KeyValueOptionType, KeyValueOptions, KeyValueOptionsDelimiter,
+};
+use crate::ast::helpers::stmt_create_database::CreateDatabaseBuilder;
 use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
 use crate::ast::helpers::stmt_data_loading::{
     FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject,
 };
 use crate::ast::{
-    ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident,
-    IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
-    IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, SqlOption, Statement,
+    CatalogSyncNamespaceMode, ColumnOption, ColumnPolicy, ColumnPolicyProperty, ContactEntry,
+    CopyIntoSnowflakeKind, DollarQuotedString, Ident, IdentityParameters, IdentityProperty,
+    IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, ObjectName,
+    ObjectNamePart, RowAccessPolicy, ShowObjects, SqlOption, Statement, StorageSerializationPolicy,
     TagsColumnOption, WrappedCollection,
 };
 use crate::dialect::{Dialect, Precedence};
 use crate::keywords::Keyword;
 use crate::parser::{IsOptional, Parser, ParserError};
-use crate::tokenizer::{Token, Word};
+use crate::tokenizer::Token;
 #[cfg(not(feature = "std"))]
 use alloc::boxed::Box;
 #[cfg(not(feature = "std"))]
@@ -42,9 +46,84 @@ use alloc::vec::Vec;
 use alloc::{format, vec};
 
 use super::keywords::RESERVED_FOR_IDENTIFIER;
-use sqlparser::ast::StorageSerializationPolicy;
 
 const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT];
 
+// See: <https://docs.snowflake.com/en/sql-reference/reserved-keywords>
+const RESERVED_KEYWORDS_FOR_TABLE_FACTOR: &[Keyword] = &[
+    Keyword::ALL,
+    Keyword::ALTER,
+    Keyword::AND,
+    Keyword::ANY,
+    Keyword::AS,
+    Keyword::BETWEEN,
+    Keyword::BY,
+    Keyword::CHECK,
+    Keyword::COLUMN,
+    Keyword::CONNECT,
+    Keyword::CREATE,
+    Keyword::CROSS,
+    Keyword::CURRENT,
+    Keyword::DELETE,
+    Keyword::DISTINCT,
+    Keyword::DROP,
+    Keyword::ELSE,
+    Keyword::EXISTS,
+    Keyword::FOLLOWING,
+    Keyword::FOR,
+    Keyword::FROM,
+    Keyword::FULL,
+    Keyword::GRANT,
+    Keyword::GROUP,
+    Keyword::HAVING,
+    Keyword::ILIKE,
+    Keyword::IN,
+    Keyword::INCREMENT,
+    Keyword::INNER,
+    Keyword::INSERT,
+    Keyword::INTERSECT,
+    Keyword::INTO,
+    Keyword::IS,
+    Keyword::JOIN,
+    Keyword::LEFT,
+    Keyword::LIKE,
+    Keyword::MINUS,
+    Keyword::NATURAL,
+    Keyword::NOT,
+    Keyword::NULL,
+    Keyword::OF,
+    Keyword::ON,
+    Keyword::OR,
+    Keyword::ORDER,
+    Keyword::QUALIFY,
+    Keyword::REGEXP,
+    Keyword::REVOKE,
+    Keyword::RIGHT,
+    Keyword::RLIKE,
+    Keyword::ROW,
+    Keyword::ROWS,
+    Keyword::SAMPLE,
+    Keyword::SELECT,
+    Keyword::SET,
+    Keyword::SOME,
+    Keyword::START,
+    Keyword::TABLE,
+    Keyword::TABLESAMPLE,
+    Keyword::THEN,
+    Keyword::TO,
+    Keyword::TRIGGER,
+    Keyword::UNION,
+    Keyword::UNIQUE,
+    Keyword::UPDATE,
+    Keyword::USING,
+    Keyword::VALUES,
+    Keyword::WHEN,
+    Keyword::WHENEVER,
+    Keyword::WHERE,
+    Keyword::WINDOW,
+    Keyword::WITH,
+];
+
 /// A [`Dialect`] for [Snowflake](https://www.snowflake.com/)
 #[derive(Debug, Default)]
 pub struct SnowflakeDialect;
@@ -131,6 +210,10 @@ impl Dialect for SnowflakeDialect {
     }
 
     fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
+        if parser.parse_keyword(Keyword::BEGIN) {
+            return Some(parser.parse_begin_exception_end());
+        }
+
         if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
             // ALTER SESSION
             let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
@@ -178,6 +261,8 @@ impl Dialect for SnowflakeDialect {
             return Some(parse_create_table(
                 or_replace, global, temporary, volatile, transient, iceberg, parser,
             ));
+        } else if parser.parse_keyword(Keyword::DATABASE) {
+            return Some(parse_create_database(or_replace, transient, parser));
         } else {
             // need to go back with the cursor
             let mut back = 1;
@@ -279,6 +364,10 @@ impl Dialect for SnowflakeDialect {
         true
     }
 
+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        false
+    }
+
     fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
         // Unreserve some keywords that Snowflake accepts as identifiers
         // See: https://docs.snowflake.com/en/sql-reference/reserved-keywords
@@ -293,27 +382,28 @@ impl Dialect for SnowflakeDialect {
         true
     }
 
-    fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
-        explicit
-            || match kw {
-                // The following keywords can be considered an alias as long as
-                // they are not followed by other tokens that may change their meaning
-                // e.g. `SELECT * EXCEPT (col1) FROM tbl`
-                Keyword::EXCEPT
-                // e.g. `SELECT 1 LIMIT 5`
-                | Keyword::LIMIT
-                // e.g. `SELECT 1 OFFSET 5 ROWS`
-                | Keyword::OFFSET
-                // e.g. `INSERT INTO t SELECT 1 RETURNING *`
-                | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
-                {
-                    false
-                }
-
-                // `FETCH` can be considered an alias as long as it's not followed by `FIRST` or `NEXT`,
-                // which would give it a different meaning, for example: `SELECT 1 FETCH FIRST 10 ROWS` - not an alias
-                Keyword::FETCH
-                    if parser.peek_keyword(Keyword::FIRST) || parser.peek_keyword(Keyword::NEXT) =>
+    fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
+        match kw {
+            // The following keywords can be considered an alias as long as
+            // they are not followed by other tokens that may change their meaning
+            // e.g. `SELECT * EXCEPT (col1) FROM tbl`
+            Keyword::EXCEPT
+            // e.g. `INSERT INTO t SELECT 1 RETURNING *`
+            | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) =>
+            {
+                false
+            }
+
+            // e.g. `SELECT 1 LIMIT 5` - not an alias
+            // e.g. `SELECT 1 OFFSET 5 ROWS` - not an alias
+            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,
+
+            // `FETCH` can be considered an alias as long as it's not followed by `FIRST` or `NEXT`,
+            // which would give it a different meaning, for example:
+            // `SELECT 1 FETCH FIRST 10 ROWS` - not an alias
+            // `SELECT 1 FETCH 10` - not an alias
+            Keyword::FETCH if parser.peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]).is_some()
+                || peek_for_limit_options(parser) =>
             {
                 false
             }
@@ -338,6 +428,97 @@ impl Dialect for SnowflakeDialect {
         }
     }
 
+    fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
+        match kw {
+            // The following keywords can be considered an alias as long as
+            // they are not followed by other tokens that may change their meaning
+            Keyword::RETURNING
+            | Keyword::INNER
+            | Keyword::USING
+            | Keyword::PIVOT
+            | Keyword::UNPIVOT
+            | Keyword::EXCEPT
+            | Keyword::MATCH_RECOGNIZE
+                if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) =>
+            {
+                false
+            }
+
+            // `LIMIT` can be considered an alias as long as it's not followed by a value. For example:
+            // `SELECT * FROM tbl LIMIT WHERE 1=1` - alias
+            // `SELECT * FROM tbl LIMIT 3` - not an alias
+            Keyword::LIMIT | Keyword::OFFSET if peek_for_limit_options(parser) => false,
+
+            // `FETCH` can be considered an alias as long as it's not followed by `FIRST` or `NEXT`,
+            // which would give it a different meaning, for example:
+            // `SELECT * FROM tbl FETCH FIRST 10 ROWS` - not an alias
+            // `SELECT * FROM tbl FETCH 10` - not an alias
+            Keyword::FETCH
+                if parser
+                    .peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])
+                    .is_some()
+                    || peek_for_limit_options(parser) =>
+            {
+                false
+            }
+
+            // All sorts of join-related keywords can be considered aliases unless additional
+            // keywords change their meaning.
+            Keyword::RIGHT | Keyword::LEFT | Keyword::SEMI | Keyword::ANTI
+                if parser
+                    .peek_one_of_keywords(&[Keyword::JOIN, Keyword::OUTER])
+                    .is_some() =>
+            {
+                false
+            }
+
+            Keyword::GLOBAL if parser.peek_keyword(Keyword::FULL) => false,
+
+            // Reserved keywords by the Snowflake dialect, which seem to be less restrictive
+            // than what is listed in `keywords::RESERVED_FOR_TABLE_ALIAS`. The following
+            // keywords were tested with this statement: `SELECT <KW>.* FROM tbl <KW>`.
+            Keyword::WITH
+            | Keyword::ORDER
+            | Keyword::SELECT
+            | Keyword::WHERE
+            | Keyword::GROUP
+            | Keyword::HAVING
+            | Keyword::LATERAL
+            | Keyword::UNION
+            | Keyword::INTERSECT
+            | Keyword::MINUS
+            | Keyword::ON
+            | Keyword::JOIN
+            | Keyword::INNER
+            | Keyword::CROSS
+            | Keyword::FULL
+            | Keyword::LEFT
+            | Keyword::RIGHT
+            | Keyword::NATURAL
+            | Keyword::USING
+            | Keyword::ASOF
+            | Keyword::MATCH_CONDITION
+            | Keyword::SET
+            | Keyword::QUALIFY
+            | Keyword::FOR
+            | Keyword::START
+            | Keyword::CONNECT
+            | Keyword::SAMPLE
+            | Keyword::TABLESAMPLE
+            | Keyword::FROM => false,
+
+            // Any other word is considered an alias
+            _ => true,
+        }
+    }
+
+    fn is_table_factor(&self, kw: &Keyword, parser: &mut Parser) -> bool {
+        match kw {
+            Keyword::LIMIT if peek_for_limit_options(parser) => false,
+            _ => !RESERVED_KEYWORDS_FOR_TABLE_FACTOR.contains(kw),
+        }
+    }
+
     /// See: <https://docs.snowflake.com/en/sql-reference/constructs/at-before>
     fn supports_timestamp_versioning(&self) -> bool {
         true
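A rough illustration of these alias rules (expected behavior under this revision, not taken from the patch's test suite): a keyword at the end of input can only be a table alias, while the same kind of keyword followed by tokens that change its meaning keeps that meaning.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = SnowflakeDialect {};
    // RETURNING at end of input is treated as a table alias.
    assert!(Parser::parse_sql(&dialect, "SELECT * FROM tbl RETURNING").is_ok());
    // PIVOT followed by its argument list keeps its operator meaning.
    let sql = "SELECT * FROM monthly_sales PIVOT (SUM(amount) FOR month IN ('JAN', 'FEB'))";
    assert!(Parser::parse_sql(&dialect, sql).is_ok());
}
```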
@@ -352,6 +533,47 @@ impl Dialect for SnowflakeDialect {
     fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
         &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
     }
+
+    fn supports_space_separated_column_options(&self) -> bool {
+        true
+    }
+
+    fn supports_comma_separated_drop_column_list(&self) -> bool {
+        true
+    }
+
+    fn is_identifier_generating_function_name(
+        &self,
+        ident: &Ident,
+        name_parts: &[ObjectNamePart],
+    ) -> bool {
+        ident.quote_style.is_none()
+            && ident.value.to_lowercase() == "identifier"
+            && !name_parts
+                .iter()
+                .any(|p| matches!(p, ObjectNamePart::Function(_)))
+    }
+
+    // For example: `SELECT IDENTIFIER('alias1').* FROM tbl AS alias1`
+    fn supports_select_expr_star(&self) -> bool {
+        true
+    }
+
+    fn supports_select_wildcard_exclude(&self) -> bool {
+        true
+    }
+}
+
+// Peeks ahead to identify tokens that are expected after
+// a LIMIT/FETCH keyword.
+fn peek_for_limit_options(parser: &Parser) -> bool {
+    match &parser.peek_token_ref().token {
+        Token::Number(_, _) | Token::Placeholder(_) => true,
+        Token::SingleQuotedString(val) if val.is_empty() => true,
+        Token::DollarQuotedString(DollarQuotedString { value, .. }) if value.is_empty() => true,
+        Token::Word(w) if w.keyword == Keyword::NULL => true,
+        _ => false,
+    }
 }
 
 fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
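`peek_for_limit_options` is what lets `LIMIT`, `OFFSET`, and `FETCH` double as aliases: only when the next token is a number, a placeholder, an empty string, or `NULL` do they keep their clause meaning. A hedged sketch of the resulting behavior:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = SnowflakeDialect {};
    // `LIMIT 3` is a limit clause; a bare trailing `LIMIT` is a table alias.
    assert!(Parser::parse_sql(&dialect, "SELECT * FROM tbl LIMIT 3").is_ok());
    assert!(Parser::parse_sql(&dialect, "SELECT * FROM tbl LIMIT").is_ok());
}
```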
@@ -382,6 +604,7 @@ fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
         set,
         session_params: KeyValueOptions {
             options: session_options,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
     })
 }
@@ -437,23 +660,20 @@ pub fn parse_create_table(
             Keyword::AS => {
                 let query = parser.parse_query()?;
                 builder = builder.query(Some(query));
-                break;
             }
             Keyword::CLONE => {
                 let clone = parser.parse_object_name(false).ok();
                 builder = builder.clone_clause(clone);
-                break;
             }
             Keyword::LIKE => {
                 let like = parser.parse_object_name(false).ok();
                 builder = builder.like(like);
-                break;
             }
             Keyword::CLUSTER => {
                 parser.expect_keyword_is(Keyword::BY)?;
                 parser.expect_token(&Token::LParen)?;
                 let cluster_by = Some(WrappedCollection::Parentheses(
-                    parser.parse_comma_separated(|p| p.parse_identifier())?,
+                    parser.parse_comma_separated(|p| p.parse_expr())?,
                 ));
                 parser.expect_token(&Token::RParen)?;
 
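`CLUSTER BY` previously accepted only bare identifiers; it now takes arbitrary expressions. A hedged sketch of what should newly parse under this revision:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Clustering on an expression rather than a plain column.
    let sql = "CREATE TABLE t (c1 DATE) CLUSTER BY (TO_DATE(c1))";
    assert!(Parser::parse_sql(&SnowflakeDialect {}, sql).is_ok());
}
```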
@@ -461,29 +681,11 @@ pub fn parse_create_table(
             }
             Keyword::ENABLE_SCHEMA_EVOLUTION => {
                 parser.expect_token(&Token::Eq)?;
-                let enable_schema_evolution =
-                    match parser.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
-                        Some(Keyword::TRUE) => true,
-                        Some(Keyword::FALSE) => false,
-                        _ => {
-                            return parser.expected("TRUE or FALSE", next_token);
-                        }
-                    };
-
-                builder = builder.enable_schema_evolution(Some(enable_schema_evolution));
+                builder = builder.enable_schema_evolution(Some(parser.parse_boolean_string()?));
             }
             Keyword::CHANGE_TRACKING => {
                 parser.expect_token(&Token::Eq)?;
-                let change_tracking =
-                    match parser.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]) {
-                        Some(Keyword::TRUE) => true,
-                        Some(Keyword::FALSE) => false,
-                        _ => {
-                            return parser.expected("TRUE or FALSE", next_token);
-                        }
-                    };
-
-                builder = builder.change_tracking(Some(change_tracking));
+                builder = builder.change_tracking(Some(parser.parse_boolean_string()?));
             }
             Keyword::DATA_RETENTION_TIME_IN_DAYS => {
                 parser.expect_token(&Token::Eq)?;
@@ -573,7 +775,7 @@ pub fn parse_create_table(
                 builder = builder.columns(columns).constraints(constraints);
             }
             Token::EOF => {
-                if builder.columns.is_empty() {
+                if !builder.validate_schema_info() {
                     return Err(ParserError::ParserError(
                         "unexpected end of input".to_string(),
                     ));
@@ -582,7 +784,7 @@ pub fn parse_create_table(
                 break;
             }
             Token::SemiColon => {
-                if builder.columns.is_empty() {
+                if !builder.validate_schema_info() {
                     return Err(ParserError::ParserError(
                         "unexpected end of input".to_string(),
                     ));
@@ -613,6 +815,115 @@ pub fn parse_create_table(
     Ok(builder.build())
 }
 
+/// Parse a Snowflake `CREATE DATABASE` statement.
+/// <https://docs.snowflake.com/en/sql-reference/sql/create-database>
+pub fn parse_create_database(
+    or_replace: bool,
+    transient: bool,
+    parser: &mut Parser,
+) -> Result<Statement, ParserError> {
+    let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
+    let name = parser.parse_object_name(false)?;
+
+    let mut builder = CreateDatabaseBuilder::new(name)
+        .or_replace(or_replace)
+        .transient(transient)
+        .if_not_exists(if_not_exists);
+
+    loop {
+        let next_token = parser.next_token();
+        match &next_token.token {
+            Token::Word(word) => match word.keyword {
+                Keyword::CLONE => {
+                    builder = builder.clone_clause(Some(parser.parse_object_name(false)?));
+                }
+                Keyword::DATA_RETENTION_TIME_IN_DAYS => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder =
+                        builder.data_retention_time_in_days(Some(parser.parse_literal_uint()?));
+                }
+                Keyword::MAX_DATA_EXTENSION_TIME_IN_DAYS => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder =
+                        builder.max_data_extension_time_in_days(Some(parser.parse_literal_uint()?));
+                }
+                Keyword::EXTERNAL_VOLUME => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.external_volume(Some(parser.parse_literal_string()?));
+                }
+                Keyword::CATALOG => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.catalog(Some(parser.parse_literal_string()?));
+                }
+                Keyword::REPLACE_INVALID_CHARACTERS => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder =
+                        builder.replace_invalid_characters(Some(parser.parse_boolean_string()?));
+                }
+                Keyword::DEFAULT_DDL_COLLATION => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.default_ddl_collation(Some(parser.parse_literal_string()?));
+                }
+                Keyword::STORAGE_SERIALIZATION_POLICY => {
+                    parser.expect_token(&Token::Eq)?;
+                    let policy = parse_storage_serialization_policy(parser)?;
+                    builder = builder.storage_serialization_policy(Some(policy));
+                }
+                Keyword::COMMENT => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.comment(Some(parser.parse_literal_string()?));
+                }
+                Keyword::CATALOG_SYNC => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.catalog_sync(Some(parser.parse_literal_string()?));
+                }
+                Keyword::CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER => {
+                    parser.expect_token(&Token::Eq)?;
+                    builder = builder.catalog_sync_namespace_flatten_delimiter(Some(
+                        parser.parse_literal_string()?,
+                    ));
+                }
+                Keyword::CATALOG_SYNC_NAMESPACE_MODE => {
+                    parser.expect_token(&Token::Eq)?;
+                    let mode =
+                        match parser.parse_one_of_keywords(&[Keyword::NEST, Keyword::FLATTEN]) {
+                            Some(Keyword::NEST) => CatalogSyncNamespaceMode::Nest,
+                            Some(Keyword::FLATTEN) => CatalogSyncNamespaceMode::Flatten,
+                            _ => {
+                                return parser.expected("NEST or FLATTEN", next_token);
+                            }
+                        };
+                    builder = builder.catalog_sync_namespace_mode(Some(mode));
+                }
+                Keyword::WITH => {
+                    if parser.parse_keyword(Keyword::TAG) {
+                        parser.expect_token(&Token::LParen)?;
+                        let tags = parser.parse_comma_separated(Parser::parse_tag)?;
+                        parser.expect_token(&Token::RParen)?;
+                        builder = builder.with_tags(Some(tags));
+                    } else if parser.parse_keyword(Keyword::CONTACT) {
+                        parser.expect_token(&Token::LParen)?;
+                        let contacts = parser.parse_comma_separated(|p| {
+                            let purpose = p.parse_identifier()?.value;
+                            p.expect_token(&Token::Eq)?;
+                            let contact = p.parse_identifier()?.value;
+                            Ok(ContactEntry { purpose, contact })
+                        })?;
+                        parser.expect_token(&Token::RParen)?;
+                        builder = builder.with_contacts(Some(contacts));
+                    } else {
+                        return parser.expected("TAG or CONTACT", next_token);
+                    }
+                }
+                _ => return parser.expected("end of statement", next_token),
+            },
+            Token::SemiColon | Token::EOF => break,
+            _ => return parser.expected("end of statement", next_token),
+        }
+    }
+    Ok(builder.build())
+}
+
 pub fn parse_storage_serialization_policy(
     parser: &mut Parser,
 ) -> Result<StorageSerializationPolicy, ParserError> {
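A hedged end-to-end sketch of the new `CREATE DATABASE` support; the option values are invented but follow the grammar handled above:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE OR REPLACE DATABASE d1 CLONE d2 \
               DATA_RETENTION_TIME_IN_DAYS = 7 \
               CATALOG_SYNC_NAMESPACE_MODE = FLATTEN \
               COMMENT = 'cloned database'";
    assert!(Parser::parse_sql(&SnowflakeDialect {}, sql).is_ok());
}
```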
@@ -646,19 +957,19 @@ pub fn parse_create_stage(
     // [ directoryTableParams ]
     if parser.parse_keyword(Keyword::DIRECTORY) {
         parser.expect_token(&Token::Eq)?;
-        directory_table_params = parse_parentheses_options(parser)?;
+        directory_table_params = parser.parse_key_value_options(true, &[])?;
     }
 
     // [ file_format]
     if parser.parse_keyword(Keyword::FILE_FORMAT) {
         parser.expect_token(&Token::Eq)?;
-        file_format = parse_parentheses_options(parser)?;
+        file_format = parser.parse_key_value_options(true, &[])?;
     }
 
     // [ copy_options ]
     if parser.parse_keyword(Keyword::COPY_OPTIONS) {
         parser.expect_token(&Token::Eq)?;
-        copy_options = parse_parentheses_options(parser)?;
+        copy_options = parser.parse_key_value_options(true, &[])?;
     }
 
     // [ comment ]
@@ -675,12 +986,15 @@ pub fn parse_create_stage(
         stage_params,
         directory_table_params: KeyValueOptions {
             options: directory_table_params,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
         file_format: KeyValueOptions {
             options: file_format,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
         copy_options: KeyValueOptions {
             options: copy_options,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
         comment,
     })
@@ -703,6 +1017,8 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result<Ident, ParserError> {
         Token::Tilde => ident.push('~'),
         Token::Mod => ident.push('%'),
         Token::Div => ident.push('/'),
+        Token::Plus => ident.push('+'),
+        Token::Number(n, _) => ident.push_str(n),
         Token::Word(w) => ident.push_str(&w.to_string()),
         _ => return parser.expected("stage name identifier", parser.peek_token()),
     }
@@ -747,10 +1063,16 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
     let mut from_stage = None;
     let mut stage_params = StageParamsObject {
         url: None,
-        encryption: KeyValueOptions { options: vec![] },
+        encryption: KeyValueOptions {
+            options: vec![],
+            delimiter: KeyValueOptionsDelimiter::Space,
+        },
         endpoint: None,
         storage_integration: None,
-        credentials: KeyValueOptions { options: vec![] },
+        credentials: KeyValueOptions {
+            options: vec![],
+            delimiter: KeyValueOptionsDelimiter::Space,
+        },
     };
     let mut from_query = None;
     let mut partition = None;
@@ -812,7 +1134,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
     // FILE_FORMAT
     if parser.parse_keyword(Keyword::FILE_FORMAT) {
         parser.expect_token(&Token::Eq)?;
-        file_format = parse_parentheses_options(parser)?;
+        file_format = parser.parse_key_value_options(true, &[])?;
     // PARTITION BY
     } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
         partition = Some(Box::new(parser.parse_expr()?))
@@ -850,14 +1172,14 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
     // COPY OPTIONS
     } else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
         parser.expect_token(&Token::Eq)?;
-        copy_options = parse_parentheses_options(parser)?;
+        copy_options = parser.parse_key_value_options(true, &[])?;
     } else {
         match parser.next_token().token {
             Token::SemiColon | Token::EOF => break,
             Token::Comma => continue,
             // In `COPY INTO <location>` the copy options do not have a shared key
             // like in `COPY INTO <table>`
-            Token::Word(key) => copy_options.push(parse_option(parser, key)?),
+            Token::Word(key) => copy_options.push(parser.parse_key_value_option(key)?),
             _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()),
         }
     }
@@ -876,9 +1198,11 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
         pattern,
         file_format: KeyValueOptions {
             options: file_format,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
         copy_options: KeyValueOptions {
             options: copy_options,
+            delimiter: KeyValueOptionsDelimiter::Space,
         },
         validation_mode,
         partition,
@@ -978,8 +1302,14 @@ fn parse_select_item_for_data_load(
 
 fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
     let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
-    let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![] };
-    let mut credentials: KeyValueOptions = KeyValueOptions { options: vec![] };
+    let mut encryption: KeyValueOptions = KeyValueOptions {
+        options: vec![],
+        delimiter: KeyValueOptionsDelimiter::Space,
+    };
+    let mut credentials: KeyValueOptions = KeyValueOptions {
+        options: vec![],
+        delimiter: KeyValueOptionsDelimiter::Space,
+    };
 
     // URL
     if parser.parse_keyword(Keyword::URL) {
@@ -1009,7 +1339,8 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
     if parser.parse_keyword(Keyword::CREDENTIALS) {
         parser.expect_token(&Token::Eq)?;
         credentials = KeyValueOptions {
-            options: parse_parentheses_options(parser)?,
+            options: parser.parse_key_value_options(true, &[])?,
+            delimiter: KeyValueOptionsDelimiter::Space,
         };
     }
 
@@ -1017,7 +1348,8 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
     if parser.parse_keyword(Keyword::ENCRYPTION) {
         parser.expect_token(&Token::Eq)?;
         encryption = KeyValueOptions {
-            options: parse_parentheses_options(parser)?,
+            options: parser.parse_key_value_options(true, &[])?,
+            delimiter: KeyValueOptionsDelimiter::Space,
         };
     }
 
@@ -1051,7 +1383,7 @@ fn parse_session_options(
             Token::Word(key) => {
                 parser.advance_token();
                 if set {
-                    let option = parse_option(parser, key)?;
+                    let option = parser.parse_key_value_option(key)?;
                     options.push(option);
                 } else {
                     options.push(KeyValueOption {
@@ -1075,63 +1407,6 @@ fn parse_session_options(
     }
 }
 
-/// Parses options provided within parentheses like:
-/// ( ENABLE = { TRUE | FALSE }
-///   [ AUTO_REFRESH = { TRUE | FALSE } ]
-///   [ REFRESH_ON_CREATE = { TRUE | FALSE } ]
-///   [ NOTIFICATION_INTEGRATION = '<notification_integration_name>' ] )
-///
-fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<KeyValueOption>, ParserError> {
-    let mut options: Vec<KeyValueOption> = Vec::new();
-    parser.expect_token(&Token::LParen)?;
-    loop {
-        match parser.next_token().token {
-            Token::RParen => break,
-            Token::Comma => continue,
-            Token::Word(key) => options.push(parse_option(parser, key)?),
-            _ => return parser.expected("another option or ')'", parser.peek_token()),
-        };
-    }
-    Ok(options)
-}
-
-/// Parses a `KEY = VALUE` construct based on the specified key
-fn parse_option(parser: &mut Parser, key: Word) -> Result<KeyValueOption, ParserError> {
-    parser.expect_token(&Token::Eq)?;
-    if parser.parse_keyword(Keyword::TRUE) {
-        Ok(KeyValueOption {
-            option_name: key.value,
-            option_type: KeyValueOptionType::BOOLEAN,
-            value: "TRUE".to_string(),
-        })
-    } else if parser.parse_keyword(Keyword::FALSE) {
-        Ok(KeyValueOption {
-            option_name: key.value,
-            option_type: KeyValueOptionType::BOOLEAN,
-            value: "FALSE".to_string(),
-        })
-    } else {
-        match parser.next_token().token {
-            Token::SingleQuotedString(value) => Ok(KeyValueOption {
-                option_name: key.value,
-                option_type: KeyValueOptionType::STRING,
-                value,
-            }),
-            Token::Word(word) => Ok(KeyValueOption {
-                option_name: key.value,
-                option_type: KeyValueOptionType::ENUM,
-                value: word.value,
-            }),
-            Token::Number(n, _) => Ok(KeyValueOption {
-                option_name: key.value,
-                option_type: KeyValueOptionType::NUMBER,
-                value: n,
-            }),
-            _ => parser.expected("expected option value", parser.peek_token()),
-        }
-    }
-}
-
 /// Parsing a property of identity or autoincrement column option
 /// Syntax:
 /// ```sql
@@ -1177,7 +1452,7 @@ fn parse_column_policy_property(
     parser: &mut Parser,
     with: bool,
 ) -> Result<ColumnPolicyProperty, ParserError> {
-    let policy_name = parser.parse_identifier()?;
+    let policy_name = parser.parse_object_name(false)?;
     let using_columns = if parser.parse_keyword(Keyword::USING) {
         parser.expect_token(&Token::LParen)?;
         let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;
src/dialect/sqlite.rs

@@ -110,4 +110,10 @@ impl Dialect for SQLiteDialect {
     fn supports_dollar_placeholder(&self) -> bool {
         true
     }
+
+    /// SQLite supports `NOTNULL` as aliases for `IS NOT NULL`
+    /// See: <https://sqlite.org/syntax/expr.html>
+    fn supports_notnull_operator(&self) -> bool {
+        true
+    }
 }
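A quick smoke test of the new hook; assuming the parser wires supports_notnull_operator up to a postfix operator (the parser change itself sits in the suppressed src/parser/mod.rs diff), something like this should parse:

use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;

fn main() {
    // NOTNULL is accepted where IS NOT NULL would be; the canonical
    // display of the parsed expression is not pinned down here.
    let stmts = Parser::parse_sql(&SQLiteDialect {}, "SELECT a NOTNULL FROM t").unwrap();
    println!("{}", stmts[0]);
}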
src/display_utils.rs

@@ -1,3 +1,20 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
 //! Utilities for formatting SQL AST nodes with pretty printing support.
 //!
 //! The module provides formatters that implement the `Display` trait with support
src/keywords.rs

@@ -166,6 +166,8 @@ define_keywords!(
     CAST,
     CATALOG,
     CATALOG_SYNC,
+    CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER,
+    CATALOG_SYNC_NAMESPACE_MODE,
     CATCH,
     CEIL,
     CEILING,
@@ -213,6 +215,7 @@ define_keywords!(
     CONNECTOR,
     CONNECT_BY_ROOT,
     CONSTRAINT,
+    CONTACT,
     CONTAINS,
     CONTINUE,
     CONVERT,
@@ -366,6 +369,7 @@ define_keywords!(
     FIRST,
     FIRST_VALUE,
     FIXEDSTRING,
+    FLATTEN,
     FLOAT,
     FLOAT32,
     FLOAT4,
@@ -395,6 +399,7 @@ define_keywords!(
     FUNCTION,
     FUNCTIONS,
     FUSION,
+    FUTURE,
     GENERAL,
     GENERATE,
     GENERATED,
@@ -583,6 +588,7 @@ define_keywords!(
     NATURAL,
     NCHAR,
     NCLOB,
+    NEST,
     NESTED,
     NETWORK,
     NEW,
@@ -607,6 +613,7 @@ define_keywords!(
     NOT,
     NOTHING,
     NOTIFY,
+    NOTNULL,
     NOWAIT,
     NO_WRITE_TO_BINLOG,
     NTH_VALUE,
@@ -646,6 +653,7 @@ define_keywords!(
     ORDER,
     ORDINALITY,
     ORGANIZATION,
+    OTHER,
     OUT,
     OUTER,
     OUTPUT,
@@ -753,6 +761,7 @@ define_keywords!(
     REPAIR,
     REPEATABLE,
     REPLACE,
+    REPLACE_INVALID_CHARACTERS,
     REPLICA,
     REPLICATE,
     REPLICATION,
@@ -814,6 +823,7 @@ define_keywords!(
     SERDE,
     SERDEPROPERTIES,
     SERIALIZABLE,
+    SERVER,
     SERVICE,
     SESSION,
     SESSION_USER,
@@ -866,6 +876,7 @@ define_keywords!(
     STORAGE_SERIALIZATION_POLICY,
     STORED,
     STRAIGHT_JOIN,
+    STREAM,
     STRICT,
     STRING,
     STRUCT,
@@ -934,6 +945,8 @@ define_keywords!(
     TRY,
     TRY_CAST,
     TRY_CONVERT,
+    TSQUERY,
+    TSVECTOR,
     TUPLE,
     TYPE,
     UBIGINT,
@@ -977,6 +990,7 @@ define_keywords!(
     UUID,
     VACUUM,
     VALID,
+    VALIDATE,
     VALIDATION_MODE,
     VALUE,
     VALUES,
@@ -1012,6 +1026,7 @@ define_keywords!(
     WITHOUT,
     WITHOUT_ARRAY_WRAPPER,
     WORK,
+    WRAPPER,
     WRITE,
     XML,
     XMLNAMESPACES,
@@ -1112,6 +1127,7 @@ pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[
     Keyword::FETCH,
     Keyword::UNION,
     Keyword::EXCEPT,
+    Keyword::EXCLUDE,
     Keyword::INTERSECT,
     Keyword::MINUS,
     Keyword::CLUSTER,
src/parser/mod.rs: 1382 changes (file diff suppressed because it is too large)
src/test_utils.rs

@@ -270,7 +270,7 @@ impl TestedDialects {
                 tokenizer = tokenizer.with_unescape(options.unescape);
             }
             let tokens = tokenizer.tokenize().unwrap();
-            assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect);
+            assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
         });
     }
 }
@@ -294,6 +294,11 @@ pub fn all_dialects() -> TestedDialects {
     ])
 }
 
+// Returns all available dialects with the specified parser options
+pub fn all_dialects_with_options(options: ParserOptions) -> TestedDialects {
+    TestedDialects::new_with_options(all_dialects().dialects, options)
+}
+
 /// Returns all dialects matching the given predicate.
 pub fn all_dialects_where<F>(predicate: F) -> TestedDialects
 where
@@ -366,6 +371,11 @@ pub fn number(n: &str) -> Value {
     Value::Number(n.parse().unwrap(), false)
 }
 
+/// Creates a [Value::SingleQuotedString]
+pub fn single_quoted_string(s: impl Into<String>) -> Value {
+    Value::SingleQuotedString(s.into())
+}
+
 pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
     Some(TableAlias {
         name: Ident::new(name),
@@ -448,3 +458,52 @@ pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
         within_group: vec![],
     })
 }
+
+/// Gets the first index column (mysql calls it a key part) of the first index found in a
+/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
+pub fn index_column(stmt: Statement) -> Expr {
+    match stmt {
+        Statement::CreateIndex(CreateIndex { columns, .. }) => {
+            columns.first().unwrap().column.expr.clone()
+        }
+        Statement::CreateTable(CreateTable { constraints, .. }) => {
+            match constraints.first().unwrap() {
+                TableConstraint::Index { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::Unique { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::PrimaryKey { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::FulltextOrSpatial { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
+            }
+        }
+        Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
+            AlterTableOperation::AddConstraint { constraint, .. } => {
+                match constraint {
+                    TableConstraint::Index { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::Unique { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::PrimaryKey { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::FulltextOrSpatial {
+                        columns,
+                        ..
+                    } => columns.first().unwrap().column.expr.clone(),
+                    _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
+                }
+            }
+            _ => panic!("Expected a constraint"),
+        },
+        _ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
+    }
+}
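The new index_column helper gives tests a one-line way to reach the first key-part expression of whatever statement carries the index; the MySQL key-part tests later in this changeset call it roughly like this (a sketch; mysql_and_generic() is the test-local dialect helper from tests/sqlparser_mysql.rs and the statement is illustrative):

// Inside a test: grab the first key part of a parsed CREATE INDEX.
let stmt = mysql_and_generic().verified_stmt("CREATE INDEX idx ON t(c1)");
assert_eq!(index_column(stmt), Expr::Identifier(Ident::new("c1")));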
src/tokenizer.rs

@@ -1751,7 +1751,7 @@ impl<'a> Tokenizer<'a> {
             (None, Some(tok)) => Ok(Some(tok)),
             (None, None) => self.tokenizer_error(
                 chars.location(),
-                format!("Expected a valid binary operator after '{}'", prefix),
+                format!("Expected a valid binary operator after '{prefix}'"),
             ),
         }
     }
@@ -1809,7 +1809,7 @@ impl<'a> Tokenizer<'a> {
                     chars.next();
 
                     let mut temp = String::new();
-                    let end_delimiter = format!("${}$", value);
+                    let end_delimiter = format!("${value}$");
 
                     loop {
                         match chars.next() {
@@ -2402,13 +2402,13 @@ fn take_char_from_hex_digits(
             location: chars.location(),
         })?;
         let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError {
-            message: format!("Invalid hex digit in escaped unicode string: {}", next_char),
+            message: format!("Invalid hex digit in escaped unicode string: {next_char}"),
             location: chars.location(),
         })?;
         result = result * 16 + digit;
     }
     char::from_u32(result).ok_or_else(|| TokenizerError {
-        message: format!("Invalid unicode character: {:x}", result),
+        message: format!("Invalid unicode character: {result:x}"),
         location: chars.location(),
     })
 }
@@ -3504,7 +3504,7 @@ mod tests {
     }
 
     fn check_unescape(s: &str, expected: Option<&str>) {
-        let s = format!("'{}'", s);
+        let s = format!("'{s}'");
         let mut state = State {
             peekable: s.chars().peekable(),
             line: 0,
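The tokenizer hunks above are purely mechanical: Rust's inline format captures make the positional argument redundant. The two forms produce identical strings:

fn main() {
    let prefix = "!";
    // Positional argument vs. captured identifier: same output.
    assert_eq!(
        format!("Expected a valid binary operator after '{}'", prefix),
        format!("Expected a valid binary operator after '{prefix}'"),
    );
}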
@@ -1,3 +1,20 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
 use sqlparser::dialect::GenericDialect;
 use sqlparser::parser::Parser;
(File diff suppressed because it is too large)
tests/sqlparser_clickhouse.rs

@@ -28,7 +28,7 @@ use test_utils::*;
 use sqlparser::ast::Expr::{BinaryOp, Identifier};
 use sqlparser::ast::SelectItem::UnnamedExpr;
 use sqlparser::ast::TableFactor::Table;
-use sqlparser::ast::Value::Number;
+use sqlparser::ast::Value::Boolean;
 use sqlparser::ast::*;
 use sqlparser::dialect::ClickHouseDialect;
 use sqlparser::dialect::GenericDialect;
@@ -60,6 +60,7 @@ fn parse_map_access_expr() {
                 ),
             })],
         })],
+        exclude: None,
         into: None,
         from: vec![TableWithJoins {
             relation: table_from_name(ObjectName::from(vec![Ident::new("foos")])),
@@ -224,6 +225,10 @@ fn parse_create_table() {
     clickhouse().verified_stmt(
        r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#,
     );
+    clickhouse().one_statement_parses_to(
+        "CREATE TABLE x (a int) ENGINE = MergeTree() ORDER BY a",
+        "CREATE TABLE x (a INT) ENGINE = MergeTree ORDER BY a",
+    );
 }
 
 #[test]
@@ -669,11 +674,13 @@ fn parse_create_table_with_nested_data_types() {
                         DataType::Tuple(vec![
                             StructField {
                                 field_name: None,
-                                field_type: DataType::FixedString(128)
+                                field_type: DataType::FixedString(128),
+                                options: None,
                             },
                             StructField {
                                 field_name: None,
-                                field_type: DataType::Int128
+                                field_type: DataType::Int128,
+                                options: None,
                             }
                         ])
                     ))),
@@ -685,12 +692,14 @@ fn parse_create_table_with_nested_data_types() {
                         StructField {
                             field_name: Some("a".into()),
                             field_type: DataType::Datetime64(9, None),
+                            options: None,
                         },
                         StructField {
                             field_name: Some("b".into()),
                             field_type: DataType::Array(ArrayElemTypeDef::Parenthesis(
                                 Box::new(DataType::Uuid)
-                            ))
+                            )),
+                            options: None,
                         },
                     ]),
                     options: vec![],
@@ -910,7 +919,7 @@ fn parse_create_view_with_fields_data_types() {
                     }]),
                     vec![]
                 )),
-                options: None
+                options: None,
             },
             ViewColumnDef {
                 name: "f".into(),
@@ -922,7 +931,7 @@ fn parse_create_view_with_fields_data_types() {
                     }]),
                     vec![]
                 )),
-                options: None
+                options: None,
             },
         ]
     );
@@ -961,38 +970,103 @@ fn parse_limit_by() {
 
 #[test]
 fn parse_settings_in_query() {
-    match clickhouse_and_generic()
-        .verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#)
-    {
-        Statement::Query(query) => {
-            assert_eq!(
-                query.settings,
-                Some(vec![
-                    Setting {
-                        key: Ident::new("max_threads"),
-                        value: Number("1".parse().unwrap(), false)
-                    },
-                    Setting {
-                        key: Ident::new("max_block_size"),
-                        value: Number("10000".parse().unwrap(), false)
-                    },
-                ])
-            );
+    fn check_settings(sql: &str, expected: Vec<Setting>) {
+        match clickhouse_and_generic().verified_stmt(sql) {
+            Statement::Query(q) => {
+                assert_eq!(q.settings, Some(expected));
            }
            _ => unreachable!(),
        }
+    }
+
+    for (sql, expected_settings) in [
+        (
+            r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#,
+            vec![
+                Setting {
+                    key: Ident::new("max_threads"),
+                    value: Expr::value(number("1")),
+                },
+                Setting {
+                    key: Ident::new("max_block_size"),
+                    value: Expr::value(number("10000")),
+                },
+            ],
+        ),
+        (
+            r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#,
+            vec![Setting {
+                key: Ident::new("additional_table_filters"),
+                value: Expr::Dictionary(vec![DictionaryField {
+                    key: Ident::with_quote('\'', "table_1"),
+                    value: Expr::value(single_quoted_string("x != 2")).into(),
+                }]),
+            }],
+        ),
+        (
+            r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#,
+            vec![
+                Setting {
+                    key: Ident::new("additional_result_filter"),
+                    value: Expr::value(single_quoted_string("x != 2")),
+                },
+                Setting {
+                    key: Ident::new("query_plan_optimize_lazy_materialization"),
+                    value: Expr::value(Boolean(false)),
+                },
+            ],
+        ),
+    ] {
+        check_settings(sql, expected_settings);
+    }
 
     let invalid_cases = vec![
-        "SELECT * FROM t SETTINGS a",
-        "SELECT * FROM t SETTINGS a=",
-        "SELECT * FROM t SETTINGS a=1, b",
-        "SELECT * FROM t SETTINGS a=1, b=",
-        "SELECT * FROM t SETTINGS a=1, b=c",
+        ("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"),
+        (
+            "SELECT * FROM t SETTINGS a=",
+            "Expected: an expression, found: EOF",
+        ),
+        ("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"),
+        (
+            "SELECT * FROM t SETTINGS a=1, b=",
+            "Expected: an expression, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {",
+            "Expected: identifier, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b'",
+            "Expected: :, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': ",
+            "Expected: an expression, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c',}",
+            "Expected: identifier, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}",
+            "Expected: :, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }",
+            "Expected: an expression, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {ANY(b)}",
+            "Expected: :, found: (",
+        ),
     ];
-    for sql in invalid_cases {
-        clickhouse_and_generic()
-            .parse_sql_statements(sql)
-            .expect_err("Expected: SETTINGS key = value, found: ");
+    for (sql, error_msg) in invalid_cases {
+        assert_eq!(
+            clickhouse_and_generic()
+                .parse_sql_statements(sql)
+                .unwrap_err(),
+            ParserError(error_msg.to_string())
+        );
     }
 }
@@ -1341,7 +1415,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            clickhouse().verified_stmt(&format!("USE {}", object_name)),
+            clickhouse().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -1349,7 +1423,7 @@ fn parse_use() {
         for &quote in &quote_styles {
             // Test single identifier with different type of quotes
             assert_eq!(
-                clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+                clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
                 Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                     quote,
                     object_name.to_string(),
@@ -1363,7 +1437,7 @@ fn parse_use() {
 fn test_query_with_format_clause() {
     let format_options = vec!["TabSeparated", "JSONCompact", "NULL"];
     for format in &format_options {
-        let sql = format!("SELECT * FROM t FORMAT {}", format);
+        let sql = format!("SELECT * FROM t FORMAT {format}");
         match clickhouse_and_generic().verified_stmt(&sql) {
             Statement::Query(query) => {
                 if *format == "NULL" {
@@ -1546,11 +1620,11 @@ fn parse_select_table_function_settings() {
             settings: Some(vec![
                 Setting {
                     key: "s0".into(),
-                    value: Value::Number("3".parse().unwrap(), false),
+                    value: Expr::value(number("3")),
                 },
                 Setting {
                     key: "s1".into(),
-                    value: Value::SingleQuotedString("s".into()),
+                    value: Expr::value(single_quoted_string("s")),
                 },
             ]),
         },
@@ -1571,11 +1645,11 @@ fn parse_select_table_function_settings() {
             settings: Some(vec![
                 Setting {
                     key: "s0".into(),
-                    value: Value::Number("3".parse().unwrap(), false),
+                    value: Expr::value(number("3")),
                 },
                 Setting {
                     key: "s1".into(),
-                    value: Value::SingleQuotedString("s".into()),
+                    value: Expr::value(single_quoted_string("s")),
                 },
             ]),
         },
@@ -1585,7 +1659,6 @@ fn parse_select_table_function_settings() {
         "SELECT * FROM t(SETTINGS a=)",
         "SELECT * FROM t(SETTINGS a=1, b)",
         "SELECT * FROM t(SETTINGS a=1, b=)",
-        "SELECT * FROM t(SETTINGS a=1, b=c)",
     ];
     for sql in invalid_cases {
         clickhouse_and_generic()
@@ -1632,6 +1705,30 @@ fn parse_table_sample() {
     clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1 / 10 OFFSET 1 / 2");
 }
 
+#[test]
+fn test_parse_not_null_in_column_options() {
+    // In addition to DEFAULT and CHECK ClickHouse also supports MATERIALIZED, all of which
+    // can contain `IS NOT NULL` and thus `NOT NULL` as an alias.
+    let canonical = concat!(
+        "CREATE TABLE foo (",
+        "abc INT DEFAULT (42 IS NOT NULL) NOT NULL,",
+        " not_null BOOL MATERIALIZED (abc IS NOT NULL),",
+        " CHECK (abc IS NOT NULL)",
+        ")",
+    );
+    clickhouse().verified_stmt(canonical);
+    clickhouse().one_statement_parses_to(
+        concat!(
+            "CREATE TABLE foo (",
+            "abc INT DEFAULT (42 NOT NULL) NOT NULL,",
+            " not_null BOOL MATERIALIZED (abc NOT NULL),",
+            " CHECK (abc NOT NULL)",
+            ")",
+        ),
+        canonical,
+    );
+}
+
 fn clickhouse() -> TestedDialects {
     TestedDialects::new(vec![Box::new(ClickHouseDialect {})])
 }
(File diff suppressed because it is too large)
tests/sqlparser_databricks.rs

@@ -19,6 +19,7 @@ use sqlparser::ast::helpers::attached_token::AttachedToken;
 use sqlparser::ast::*;
 use sqlparser::dialect::{DatabricksDialect, GenericDialect};
 use sqlparser::parser::ParserError;
+use sqlparser::tokenizer::Span;
 use test_utils::*;
 
 #[macro_use]
@@ -213,7 +214,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            databricks().verified_stmt(&format!("USE {}", object_name)),
+            databricks().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -221,7 +222,7 @@ fn parse_use() {
         for &quote in &quote_styles {
             // Test single identifier with different type of quotes
             assert_eq!(
-                databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+                databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
                 Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                     quote,
                     object_name.to_string(),
@@ -233,21 +234,21 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test single identifier with keyword and different type of quotes
         assert_eq!(
-            databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)),
+            databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")),
             Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_catalog".to_string(),
             )])))
         );
         assert_eq!(
-            databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
+            databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
             Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_database".to_string(),
             )])))
         );
         assert_eq!(
-            databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
+            databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
             Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_schema".to_string(),
@@ -326,10 +327,14 @@ fn data_type_timestamp_ntz() {
     // Literal
     assert_eq!(
         databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"),
-        Expr::TypedString {
+        Expr::TypedString(TypedString {
             data_type: DataType::TimestampNtz,
-            value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned())
-        }
+            value: ValueWithSpan {
+                value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()),
+                span: Span::empty(),
+            },
+            uses_odbc_syntax: false
+        })
     );
 
     // Cast
@@ -357,6 +362,6 @@ fn data_type_timestamp_ntz() {
                 }]
             );
         }
-        s => panic!("Unexpected statement: {:?}", s),
+        s => panic!("Unexpected statement: {s:?}"),
     }
 }
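With the literal now wrapped in a TypedString struct (a spanned value plus the new uses_odbc_syntax flag), downstream matches change shape accordingly. A sketch of the new pattern, assuming only the field names visible in the hunk above; the helper itself is hypothetical:

use sqlparser::ast::{Expr, TypedString};

// Hypothetical helper: pull the display form of a typed literal's value
// (quotes included for quoted strings).
fn typed_literal_value(expr: &Expr) -> Option<String> {
    match expr {
        Expr::TypedString(TypedString { value, .. }) => Some(value.value.to_string()),
        _ => None,
    }
}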
tests/sqlparser_duckdb.rs

@@ -24,6 +24,7 @@ use test_utils::*;
 
 use sqlparser::ast::*;
 use sqlparser::dialect::{DuckDbDialect, GenericDialect};
+use sqlparser::parser::ParserError;
 
 fn duckdb() -> TestedDialects {
     TestedDialects::new(vec![Box::new(DuckDbDialect {})])
@@ -44,10 +45,12 @@ fn test_struct() {
             StructField {
                 field_name: Some(Ident::new("v")),
                 field_type: DataType::Varchar(None),
+                options: None,
             },
             StructField {
                 field_name: Some(Ident::new("i")),
                 field_type: DataType::Integer(None),
+                options: None,
             },
         ],
         StructBracketKind::Parentheses,
@@ -84,6 +87,7 @@ fn test_struct() {
             StructField {
                 field_name: Some(Ident::new("v")),
                 field_type: DataType::Varchar(None),
+                options: None,
             },
             StructField {
                 field_name: Some(Ident::new("s")),
@@ -92,14 +96,17 @@ fn test_struct() {
                     StructField {
                         field_name: Some(Ident::new("a1")),
                         field_type: DataType::Integer(None),
+                        options: None,
                    },
                    StructField {
                        field_name: Some(Ident::new("a2")),
                        field_type: DataType::Varchar(None),
+                        options: None,
                    },
                ],
                StructBracketKind::Parentheses,
             ),
+            options: None,
             },
         ],
         StructBracketKind::Parentheses,
@@ -262,6 +269,7 @@ fn test_select_union_by_name() {
         distinct: None,
         top: None,
         projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())],
+        exclude: None,
         top_before_distinct: false,
         into: None,
         from: vec![TableWithJoins {
@@ -292,6 +300,7 @@ fn test_select_union_by_name() {
         distinct: None,
         top: None,
         projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())],
+        exclude: None,
         top_before_distinct: false,
         into: None,
         from: vec![TableWithJoins {
@@ -362,7 +371,7 @@ fn test_duckdb_specific_int_types() {
         ("HUGEINT", DataType::HugeInt),
     ];
     for (dtype_string, data_type) in duckdb_dtypes {
-        let sql = format!("SELECT 123::{}", dtype_string);
+        let sql = format!("SELECT 123::{dtype_string}");
         let select = duckdb().verified_only_select(&sql);
         assert_eq!(
             &Expr::Cast {
@@ -786,7 +795,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            duckdb().verified_stmt(&format!("USE {}", object_name)),
+            duckdb().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -794,7 +803,7 @@ fn parse_use() {
         for &quote in &quote_styles {
             // Test single identifier with different type of quotes
             assert_eq!(
-                duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+                duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
                 Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                     quote,
                     object_name.to_string(),
@@ -806,7 +815,9 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test double identifier with different type of quotes
         assert_eq!(
-            duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
+            duckdb().verified_stmt(&format!(
+                "USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
+            )),
             Statement::Use(Use::Object(ObjectName::from(vec![
                 Ident::with_quote(quote, "CATALOG"),
                 Ident::with_quote(quote, "my_schema")
@@ -822,3 +833,32 @@ fn parse_use() {
         ])))
     );
 }
+
+#[test]
+fn test_duckdb_trim() {
+    let real_sql = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#;
+    assert_eq!(duckdb().verified_stmt(real_sql).to_string(), real_sql);
+
+    let sql_only_select = "SELECT TRIM('xyz', 'a')";
+    let select = duckdb().verified_only_select(sql_only_select);
+    assert_eq!(
+        &Expr::Trim {
+            expr: Box::new(Expr::Value(
+                Value::SingleQuotedString("xyz".to_owned()).with_empty_span()
+            )),
+            trim_where: None,
+            trim_what: None,
+            trim_characters: Some(vec![Expr::Value(
+                Value::SingleQuotedString("a".to_owned()).with_empty_span()
+            )]),
+        },
+        expr_from_projection(only(&select.projection))
+    );
+
+    // missing comma separation
+    let error_sql = "SELECT TRIM('xyz' 'a')";
+    assert_eq!(
+        ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
+        duckdb().parse_sql_statements(error_sql).unwrap_err()
+    );
+}
tests/sqlparser_hive.rs

@@ -524,7 +524,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            hive().verified_stmt(&format!("USE {}", object_name)),
+            hive().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -532,7 +532,7 @@ fn parse_use() {
         for &quote in &quote_styles {
             // Test single identifier with different type of quotes
             assert_eq!(
-                hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+                hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
                 Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                     quote,
                     object_name.to_string(),
tests/sqlparser_mssql.rs

@@ -32,7 +32,7 @@ use sqlparser::ast::DeclareAssignment::MsSqlAssignment;
 use sqlparser::ast::Value::SingleQuotedString;
 use sqlparser::ast::*;
 use sqlparser::dialect::{GenericDialect, MsSqlDialect};
-use sqlparser::parser::{Parser, ParserError};
+use sqlparser::parser::{Parser, ParserError, ParserOptions};
 
 #[test]
 fn parse_mssql_identifiers() {
@@ -126,6 +126,7 @@ fn parse_create_procedure() {
                     projection: vec![SelectItem::UnnamedExpr(Expr::Value(
                         (number("1")).with_empty_span()
                     ))],
+                    exclude: None,
                     into: None,
                     from: vec![],
                     lateral_views: vec![],
@@ -153,7 +154,8 @@ fn parse_create_procedure() {
                         quote_style: None,
                         span: Span::empty(),
                     },
-                    data_type: DataType::Int(None)
+                    data_type: DataType::Int(None),
+                    mode: None,
                 },
                 ProcedureParam {
                     name: Ident {
@@ -164,14 +166,16 @@ fn parse_create_procedure() {
                     data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
                         length: 256,
                         unit: None
-                    }))
+                    })),
+                    mode: None,
                 }
             ]),
             name: ObjectName::from(vec![Ident {
                 value: "test".into(),
                 quote_style: None,
                 span: Span::empty(),
-            }])
+            }]),
+            language: None,
         }
     )
 }
@@ -1365,6 +1369,7 @@ fn parse_substring_in_select() {
                         special: true,
                         shorthand: false,
                     })],
+                    exclude: None,
                     into: None,
                     from: vec![TableWithJoins {
                         relation: table_from_name(ObjectName::from(vec![Ident {
@@ -1513,6 +1518,7 @@ fn parse_mssql_declare() {
                         (Value::Number("4".parse().unwrap(), false)).with_empty_span()
                     )),
                 })],
+                exclude: None,
                 into: None,
                 from: vec![],
                 lateral_views: vec![],
@@ -1670,7 +1676,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            ms().verified_stmt(&format!("USE {}", object_name)),
+            ms().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -1678,7 +1684,7 @@ fn parse_use() {
         for &quote in &quote_styles {
             // Test single identifier with different type of quotes
             assert_eq!(
-                ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+                ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
                 Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                     quote,
                     object_name.to_string(),
@@ -2184,7 +2190,7 @@ fn parse_mssql_if_else() {
                 "IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"
             );
         }
-        _ => panic!("Unexpected statements: {:?}", stmts),
+        _ => panic!("Unexpected statements: {stmts:?}"),
     }
 }
 
@@ -2234,7 +2240,7 @@ fn test_mssql_if_statements_span() {
                 Span::new(Location::new(1, 21), Location::new(1, 36))
             );
         }
-        stmt => panic!("Unexpected statement: {:?}", stmt),
+        stmt => panic!("Unexpected statement: {stmt:?}"),
     }
 
     // Blocks
@@ -2255,7 +2261,7 @@ fn test_mssql_if_statements_span() {
                 Span::new(Location::new(1, 32), Location::new(1, 57))
            );
        }
-        stmt => panic!("Unexpected statement: {:?}", stmt),
+        stmt => panic!("Unexpected statement: {stmt:?}"),
     }
 
@@ -2321,6 +2327,18 @@ fn ms() -> TestedDialects {
     TestedDialects::new(vec![Box::new(MsSqlDialect {})])
 }
 
+// MS SQL dialect with support for optional semi-colon statement delimiters
+fn tsql() -> TestedDialects {
+    TestedDialects::new_with_options(
+        vec![Box::new(MsSqlDialect {})],
+        ParserOptions {
+            trailing_commas: false,
+            unescape: true,
+            require_semicolon_stmt_delimiter: false,
+        },
+    )
+}
+
 fn ms_and_generic() -> TestedDialects {
     TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})])
 }
@@ -2477,3 +2495,15 @@ fn parse_mssql_grant() {
 fn parse_mssql_deny() {
     ms().verified_stmt("DENY SELECT ON my_table TO public, db_admin");
 }
+
+#[test]
+fn test_tsql_no_semicolon_delimiter() {
+    let sql = r#"
+        DECLARE @X AS NVARCHAR(MAX)='x'
+        DECLARE @Y AS NVARCHAR(MAX)='y'
+    "#;
+
+    let stmts = tsql().parse_sql_statements(sql).unwrap();
+    assert_eq!(stmts.len(), 2);
+    assert!(stmts.iter().all(|s| matches!(s, Statement::Declare { .. })));
+}
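The same option can be set when driving Parser directly instead of going through the test harness; a minimal sketch mirroring the test above:

use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::{Parser, ParserOptions};

fn main() {
    let options = ParserOptions {
        trailing_commas: false,
        unescape: true,
        require_semicolon_stmt_delimiter: false,
    };
    // Two T-SQL statements with no semicolon between them.
    let sql = "DECLARE @X AS NVARCHAR(MAX)='x' DECLARE @Y AS NVARCHAR(MAX)='y'";
    let stmts = Parser::new(&MsSqlDialect {})
        .with_options(options)
        .try_with_sql(sql)
        .unwrap()
        .parse_statements()
        .unwrap();
    assert_eq!(stmts.len(), 2);
}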
|
@ -593,7 +593,7 @@ fn parse_use() {
|
||||||
for object_name in &valid_object_names {
|
for object_name in &valid_object_names {
|
||||||
// Test single identifier without quotes
|
// Test single identifier without quotes
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mysql_and_generic().verified_stmt(&format!("USE {}", object_name)),
|
mysql_and_generic().verified_stmt(&format!("USE {object_name}")),
|
||||||
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
|
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
|
||||||
object_name.to_string()
|
object_name.to_string()
|
||||||
)])))
|
)])))
|
||||||
|
@ -601,8 +601,7 @@ fn parse_use() {
|
||||||
for "e in "e_styles {
|
for "e in "e_styles {
|
||||||
// Test single identifier with different type of quotes
|
// Test single identifier with different type of quotes
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
mysql_and_generic()
|
mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
|
||||||
.verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
|
|
||||||
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
|
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
|
||||||
quote,
|
quote,
|
||||||
object_name.to_string(),
|
object_name.to_string(),
|
||||||
|
@ -670,6 +669,20 @@ fn table_constraint_unique_primary_ctor(
|
||||||
characteristics: Option<ConstraintCharacteristics>,
|
characteristics: Option<ConstraintCharacteristics>,
|
||||||
unique_index_type_display: Option<KeyOrIndexDisplay>,
|
unique_index_type_display: Option<KeyOrIndexDisplay>,
|
||||||
) -> TableConstraint {
|
) -> TableConstraint {
|
||||||
|
let columns = columns
|
||||||
|
.into_iter()
|
||||||
|
.map(|ident| IndexColumn {
|
||||||
|
column: OrderByExpr {
|
||||||
|
expr: Expr::Identifier(ident),
|
||||||
|
options: OrderByOptions {
|
||||||
|
asc: None,
|
||||||
|
nulls_first: None,
|
||||||
|
},
|
||||||
|
with_fill: None,
|
||||||
|
},
|
||||||
|
operator_class: None,
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
match unique_index_type_display {
|
match unique_index_type_display {
|
||||||
Some(index_type_display) => TableConstraint::Unique {
|
Some(index_type_display) => TableConstraint::Unique {
|
||||||
name,
|
name,
|
||||||
|
@ -795,6 +808,67 @@ fn parse_create_table_primary_and_unique_key_with_index_options() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_prefix_key_part() {
|
||||||
|
let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value(
|
||||||
|
number("10"),
|
||||||
|
)))];
|
||||||
|
for sql in [
|
||||||
|
"CREATE INDEX idx_index ON t(textcol(10))",
|
||||||
|
"ALTER TABLE tab ADD INDEX idx_index (textcol(10))",
|
||||||
|
"ALTER TABLE tab ADD PRIMARY KEY (textcol(10))",
|
||||||
|
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
|
||||||
|
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
|
||||||
|
"ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))",
|
||||||
|
"CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))",
|
||||||
|
] {
|
||||||
|
match index_column(mysql_and_generic().verified_stmt(sql)) {
|
||||||
|
Expr::Function(Function {
|
||||||
|
name,
|
||||||
|
args: FunctionArguments::List(FunctionArgumentList { args, .. }),
|
||||||
|
..
|
||||||
|
}) => {
|
||||||
|
assert_eq!(name.to_string(), "textcol");
|
||||||
|
assert_eq!(args, expected);
|
||||||
|
}
|
||||||
|
expr => panic!("unexpected expression {expr} for {sql}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_functional_key_part() {
|
||||||
|
assert_eq!(
|
||||||
|
index_column(
|
||||||
|
mysql_and_generic()
|
||||||
|
.verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)")
|
||||||
|
),
|
||||||
|
Expr::Nested(Box::new(Expr::Collate {
|
||||||
|
expr: Box::new(Expr::Identifier("col".into())),
|
||||||
|
collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier(
|
||||||
|
Ident::new("utf8mb4_bin")
|
||||||
|
)]),
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
index_column(mysql_and_generic().verified_stmt(
|
||||||
|
r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"#
|
||||||
|
)),
|
||||||
|
Expr::Nested(Box::new(Expr::Cast {
|
||||||
|
kind: CastKind::Cast,
|
||||||
|
expr: Box::new(Expr::BinaryOp {
|
||||||
|
left: Box::new(Expr::Identifier(Ident::new("col"))),
|
||||||
|
op: BinaryOperator::LongArrow,
|
||||||
|
right: Box::new(Expr::Value(
|
||||||
|
Value::SingleQuotedString("$.id".to_string()).with_empty_span()
|
||||||
|
)),
|
||||||
|
}),
|
||||||
|
data_type: DataType::Unsigned,
|
||||||
|
format: None,
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_create_table_primary_and_unique_key_with_index_type() {
|
fn parse_create_table_primary_and_unique_key_with_index_type() {
|
||||||
let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {
|
let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {
|
||||||
|
@ -1329,6 +1403,7 @@ fn parse_escaped_quote_identifiers_with_escape() {
|
||||||
quote_style: Some('`'),
|
quote_style: Some('`'),
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}))],
|
}))],
|
||||||
|
exclude: None,
|
||||||
into: None,
|
into: None,
|
||||||
from: vec![],
|
from: vec![],
|
||||||
lateral_views: vec![],
|
lateral_views: vec![],
|
||||||
|
@ -1367,6 +1442,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
|
||||||
ParserOptions {
|
ParserOptions {
|
||||||
trailing_commas: false,
|
trailing_commas: false,
|
||||||
unescape: false,
|
unescape: false,
|
||||||
|
require_semicolon_stmt_delimiter: true,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
.verified_stmt(sql),
|
.verified_stmt(sql),
|
||||||
|
@ -1382,6 +1458,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
|
||||||
quote_style: Some('`'),
|
quote_style: Some('`'),
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}))],
|
}))],
|
||||||
|
exclude: None,
|
||||||
into: None,
|
into: None,
|
||||||
from: vec![],
|
from: vec![],
|
||||||
lateral_views: vec![],
|
lateral_views: vec![],
|
||||||
|
@ -1429,6 +1506,7 @@ fn parse_escaped_backticks_with_escape() {
|
||||||
quote_style: Some('`'),
|
quote_style: Some('`'),
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}))],
|
}))],
|
||||||
|
exclude: None,
|
||||||
into: None,
|
into: None,
|
||||||
from: vec![],
|
from: vec![],
|
||||||
lateral_views: vec![],
|
lateral_views: vec![],
|
||||||
|
@ -1480,6 +1558,7 @@ fn parse_escaped_backticks_with_no_escape() {
|
||||||
quote_style: Some('`'),
|
quote_style: Some('`'),
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}))],
|
}))],
|
||||||
|
exclude: None,
|
||||||
into: None,
|
into: None,
|
||||||
from: vec![],
|
from: vec![],
|
||||||
lateral_views: vec![],
|
lateral_views: vec![],
|
||||||
|
@ -1626,6 +1705,51 @@ fn parse_create_table_unsigned() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_signed_data_types() {
|
||||||
|
let sql = "CREATE TABLE foo (bar_tinyint TINYINT(3) SIGNED, bar_smallint SMALLINT(5) SIGNED, bar_mediumint MEDIUMINT(13) SIGNED, bar_int INT(11) SIGNED, bar_bigint BIGINT(20) SIGNED)";
|
||||||
|
let canonical = "CREATE TABLE foo (bar_tinyint TINYINT(3), bar_smallint SMALLINT(5), bar_mediumint MEDIUMINT(13), bar_int INT(11), bar_bigint BIGINT(20))";
|
||||||
|
match mysql().one_statement_parses_to(sql, canonical) {
|
||||||
|
Statement::CreateTable(CreateTable { name, columns, .. }) => {
|
||||||
|
assert_eq!(name.to_string(), "foo");
|
||||||
|
assert_eq!(
|
||||||
|
vec![
|
||||||
|
ColumnDef {
|
||||||
|
name: Ident::new("bar_tinyint"),
|
||||||
|
data_type: DataType::TinyInt(Some(3)),
|
||||||
|
options: vec![],
|
||||||
|
},
|
||||||
|
ColumnDef {
|
||||||
|
name: Ident::new("bar_smallint"),
|
||||||
|
data_type: DataType::SmallInt(Some(5)),
|
||||||
|
options: vec![],
|
||||||
|
},
|
||||||
|
ColumnDef {
|
||||||
|
name: Ident::new("bar_mediumint"),
|
||||||
|
data_type: DataType::MediumInt(Some(13)),
|
||||||
|
options: vec![],
|
||||||
|
},
|
||||||
|
ColumnDef {
|
||||||
|
name: Ident::new("bar_int"),
|
||||||
|
data_type: DataType::Int(Some(11)),
|
||||||
|
options: vec![],
|
||||||
|
},
|
||||||
|
ColumnDef {
|
||||||
|
name: Ident::new("bar_bigint"),
|
||||||
|
data_type: DataType::BigInt(Some(20)),
|
||||||
|
options: vec![],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
columns
|
||||||
|
);
|
||||||
|
}
|
||||||
|
_ => unreachable!(),
|
||||||
|
}
|
||||||
|
all_dialects_except(|d| d.supports_data_type_signed_suffix())
|
||||||
|
.run_parser_method(sql, |p| p.parse_statement())
|
||||||
|
.expect_err("SIGNED suffix should not be allowed");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn parse_simple_insert() {
|
fn parse_simple_insert() {
|
||||||
let sql = r"INSERT INTO tasks (title, priority) VALUES ('Test Some Inserts', 1), ('Test Entry 2', 2), ('Test Entry 3', 3)";
|
let sql = r"INSERT INTO tasks (title, priority) VALUES ('Test Some Inserts', 1), ('Test Entry 2', 2), ('Test Entry 3', 3)";
|
||||||
|
@ -2151,6 +2275,7 @@ fn parse_select_with_numeric_prefix_column_name() {
|
||||||
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new(
|
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new(
|
||||||
"123col_$@123abc"
|
"123col_$@123abc"
|
||||||
)))],
|
)))],
|
||||||
|
exclude: None,
|
||||||
into: None,
|
into: None,
|
||||||
from: vec![TableWithJoins {
|
from: vec![TableWithJoins {
|
||||||
relation: table_from_name(ObjectName::from(vec![Ident::with_quote(
|
relation: table_from_name(ObjectName::from(vec![Ident::with_quote(
|
||||||
|
@ -2188,11 +2313,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
|
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
|
||||||
assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
|
assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 2: Qualified column name that starts with digits and on its own represents a number.
|
// Case 2: Qualified column name that starts with digits and on its own represents a number.
|
||||||
|
@ -2202,11 +2327,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
|
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
|
||||||
assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
|
assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 3: Unqualified, the same token is parsed as a number.
|
// Case 3: Unqualified, the same token is parsed as a number.
|
||||||
|
@ -2220,11 +2345,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
|
Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
|
||||||
assert_eq!(&number("15e29"), value);
|
assert_eq!(&number("15e29"), value);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 4: Quoted simple identifier.
|
// Case 4: Quoted simple identifier.
|
||||||
|
@ -2234,11 +2359,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
|
Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
|
||||||
assert_eq!(&Ident::with_quote('`', "15e29"), name);
|
assert_eq!(&Ident::with_quote('`', "15e29"), name);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 5: Quoted compound identifier.
|
// Case 5: Quoted compound identifier.
|
||||||
|
@ -2251,11 +2376,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
&parts[..]
|
&parts[..]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 6: Multi-level compound identifiers.
|
// Case 6: Multi-level compound identifiers.
|
||||||
|
@ -2272,11 +2397,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
&parts[..]
|
&parts[..]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Case 7: Multi-level compound quoted identifiers.
|
// Case 7: Multi-level compound quoted identifiers.
|
||||||
|
@ -2293,11 +2418,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
|
||||||
&parts[..]
|
&parts[..]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
proj => panic!("Unexpected projection: {:?}", proj),
|
proj => panic!("Unexpected projection: {proj:?}"),
|
||||||
},
|
},
|
||||||
body => panic!("Unexpected statement body: {:?}", body),
|
body => panic!("Unexpected statement body: {body:?}"),
|
||||||
},
|
},
|
||||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -2318,7 +2443,6 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() {
         q.body,
         Box::new(SetExpr::Select(Box::new(Select {
             select_token: AttachedToken::empty(),
-
             distinct: None,
             top: None,
             top_before_distinct: false,
@@ -2326,6 +2450,7 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() {
                 SelectItem::UnnamedExpr(Expr::value(number("123e4"))),
                 SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("123col_$@123abc")))
             ],
+            exclude: None,
             into: None,
             from: vec![TableWithJoins {
                 relation: table_from_name(ObjectName::from(vec![Ident::with_quote(
@@ -2802,7 +2927,7 @@ fn parse_alter_table_with_algorithm() {
         vec![
             AlterTableOperation::DropColumn {
                 has_column_keyword: true,
-                column_name: Ident::new("password_digest"),
+                column_names: vec![Ident::new("password_digest")],
                 if_exists: false,
                 drop_behavior: None,
             },
@@ -2850,7 +2975,7 @@ fn parse_alter_table_with_lock() {
         vec![
             AlterTableOperation::DropColumn {
                 has_column_keyword: true,
-                column_name: Ident::new("password_digest"),
+                column_names: vec![Ident::new("password_digest")],
                 if_exists: false,
                 drop_behavior: None,
             },
@@ -2969,6 +3094,7 @@ fn parse_substring_in_select() {
                 special: true,
                 shorthand: false,
             })],
+            exclude: None,
             into: None,
             from: vec![TableWithJoins {
                 relation: table_from_name(ObjectName::from(vec![Ident {
@@ -3283,6 +3409,7 @@ fn parse_hex_string_introducer() {
                 )
                 .into(),
             })],
+            exclude: None,
             from: vec![],
             lateral_views: vec![],
             prewhere: None,
@@ -3542,6 +3669,7 @@ fn parse_grant() {
         with_grant_option,
         as_grantor: _,
         granted_by,
+        current_grants: _,
     } = stmt
     {
         assert_eq!(
@@ -4035,3 +4163,43 @@ fn parse_alter_table_drop_index() {
         AlterTableOperation::DropIndex { name } if name.value == "idx_index"
     );
 }
+
+#[test]
+fn parse_json_member_of() {
+    mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#);
+    let sql = r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#;
+    let stmt = mysql().verified_stmt(sql);
+    match stmt {
+        Statement::Query(query) => {
+            let select = query.body.as_select().unwrap();
+            assert_eq!(
+                select.projection,
+                vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf {
+                    value: Box::new(Expr::Value(
+                        Value::SingleQuotedString("ab".to_string()).into()
+                    )),
+                    array: Box::new(Expr::Value(
+                        Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string())
+                            .into()
+                    )),
+                }))]
+            );
+        }
+        _ => panic!("Unexpected statement {stmt}"),
+    }
+}
+
+#[test]
+fn parse_show_charset() {
+    let res = mysql().verified_stmt("SHOW CHARACTER SET");
+    assert_eq!(
+        res,
+        Statement::ShowCharset(ShowCharset {
+            is_shorthand: false,
+            filter: None
+        })
+    );
+    mysql().verified_stmt("SHOW CHARACTER SET LIKE 'utf8mb4%'");
+    mysql().verified_stmt("SHOW CHARSET WHERE charset = 'utf8mb4%'");
+    mysql().verified_stmt("SHOW CHARSET LIKE 'utf8mb4%'");
+}
@@ -606,9 +606,10 @@ fn parse_alter_table_constraints_unique_nulls_distinct() {
         .verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)")
     {
         Statement::AlterTable { operations, .. } => match &operations[0] {
-            AlterTableOperation::AddConstraint(TableConstraint::Unique {
-                nulls_distinct, ..
-            }) => {
+            AlterTableOperation::AddConstraint {
+                constraint: TableConstraint::Unique { nulls_distinct, .. },
+                ..
+            } => {
                 assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct)
             }
             _ => unreachable!(),
@@ -764,10 +765,7 @@ fn parse_drop_extension() {
 
 #[test]
 fn parse_alter_table_alter_column() {
-    pg().one_statement_parses_to(
-        "ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'",
-        "ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'",
-    );
+    pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'");
 
     match alter_table_op(
         pg().verified_stmt(
@@ -783,6 +781,7 @@ fn parse_alter_table_alter_column() {
         AlterColumnOperation::SetDataType {
             data_type: DataType::Text,
             using: Some(using_expr),
+            had_set: true,
         }
     );
 }
@@ -1306,6 +1305,7 @@ fn parse_copy_to() {
                 },
             }
         ],
+        exclude: None,
         into: None,
         from: vec![],
         lateral_views: vec![],
@@ -1666,7 +1666,9 @@ fn parse_execute() {
             has_parentheses: false,
             using: vec![],
             immediate: false,
-            into: vec![]
+            into: vec![],
+            output: false,
+            default: false,
         }
     );
 
@@ -1682,7 +1684,9 @@ fn parse_execute() {
             has_parentheses: true,
             using: vec![],
             immediate: false,
-            into: vec![]
+            into: vec![],
+            output: false,
+            default: false,
         }
     );
 
@@ -1719,7 +1723,9 @@ fn parse_execute() {
             },
         ],
         immediate: false,
-        into: vec![]
+        into: vec![],
+        output: false,
+        default: false,
     }
 );
 }
@@ -2181,21 +2187,39 @@ fn parse_pg_regex_match_ops() {
         ("!~*", BinaryOperator::PGRegexNotIMatch),
     ];
 
+    // Match against a single value
     for (str_op, op) in pg_regex_match_ops {
-        let select = pg().verified_only_select(&format!("SELECT 'abc' {} '^a'", &str_op));
+        let select = pg().verified_only_select(&format!("SELECT 'abc' {str_op} '^a'"));
         assert_eq!(
             SelectItem::UnnamedExpr(Expr::BinaryOp {
-                left: Box::new(Expr::Value(
-                    (Value::SingleQuotedString("abc".into())).with_empty_span()
-                )),
+                left: Box::new(Expr::Value(single_quoted_string("abc").with_empty_span(),)),
                 op: op.clone(),
-                right: Box::new(Expr::Value(
-                    (Value::SingleQuotedString("^a".into())).with_empty_span()
-                )),
+                right: Box::new(Expr::Value(single_quoted_string("^a").with_empty_span(),)),
             }),
             select.projection[0]
         );
     }
+
+    // Match against any value from an array
+    for (str_op, op) in pg_regex_match_ops {
+        let select =
+            pg().verified_only_select(&format!("SELECT 'abc' {str_op} ANY(ARRAY['^a', 'x'])"));
+        assert_eq!(
+            SelectItem::UnnamedExpr(Expr::AnyOp {
+                left: Box::new(Expr::Value(single_quoted_string("abc").with_empty_span(),)),
+                compare_op: op.clone(),
+                right: Box::new(Expr::Array(Array {
+                    elem: vec![
+                        Expr::Value(single_quoted_string("^a").with_empty_span()),
+                        Expr::Value(single_quoted_string("x").with_empty_span()),
+                    ],
+                    named: true,
+                })),
+                is_some: false,
+            }),
+            select.projection[0]
+        )
+    }
 }
 
 #[test]
@@ -2207,21 +2231,35 @@ fn parse_pg_like_match_ops() {
         ("!~~*", BinaryOperator::PGNotILikeMatch),
     ];
 
+    // Match against a single value
     for (str_op, op) in pg_like_match_ops {
-        let select = pg().verified_only_select(&format!("SELECT 'abc' {} 'a_c%'", &str_op));
+        let select = pg().verified_only_select(&format!("SELECT 'abc' {str_op} 'a_c%'"));
         assert_eq!(
             SelectItem::UnnamedExpr(Expr::BinaryOp {
-                left: Box::new(Expr::Value(
-                    (Value::SingleQuotedString("abc".into())).with_empty_span()
-                )),
+                left: Box::new(Expr::Value(single_quoted_string("abc").with_empty_span(),)),
                 op: op.clone(),
-                right: Box::new(Expr::Value(
-                    (Value::SingleQuotedString("a_c%".into())).with_empty_span()
-                )),
+                right: Box::new(Expr::Value(single_quoted_string("a_c%").with_empty_span(),)),
             }),
             select.projection[0]
         );
     }
+
+    // Match against all values from an array
+    for (str_op, op) in pg_like_match_ops {
+        let select =
+            pg().verified_only_select(&format!("SELECT 'abc' {str_op} ALL(ARRAY['a_c%'])"));
+        assert_eq!(
+            SelectItem::UnnamedExpr(Expr::AllOp {
+                left: Box::new(Expr::Value(single_quoted_string("abc").with_empty_span(),)),
+                compare_op: op.clone(),
+                right: Box::new(Expr::Array(Array {
+                    elem: vec![Expr::Value(single_quoted_string("a_c%").with_empty_span())],
+                    named: true,
+                })),
+            }),
+            select.projection[0]
        )
+    }
 }
 
 #[test]
@@ -2535,12 +2573,12 @@ fn parse_create_indices_with_operator_classes() {
     for expected_operator_class in &operator_classes {
         let single_column_sql_statement = format!(
             "CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})",
-            expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
+            expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
                 .unwrap_or_default()
         );
         let multi_column_sql_statement = format!(
             "CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})",
-            expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
+            expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
                 .unwrap_or_default()
         );
 
@@ -2949,6 +2987,7 @@ fn parse_array_subquery_expr() {
             projection: vec![SelectItem::UnnamedExpr(Expr::Value(
                 (number("1")).with_empty_span()
             ))],
+            exclude: None,
             into: None,
             from: vec![],
             lateral_views: vec![],
@@ -2974,6 +3013,7 @@ fn parse_array_subquery_expr() {
             projection: vec![SelectItem::UnnamedExpr(Expr::Value(
                 (number("2")).with_empty_span()
            ))],
+            exclude: None,
             into: None,
             from: vec![],
             lateral_views: vec![],
@@ -3273,7 +3313,7 @@ fn test_fn_arg_with_value_operator() {
             assert!(matches!(
                 &args[..],
                 &[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }]
-            ), "Invalid function argument: {:?}", args);
+            ), "Invalid function argument: {args:?}");
         }
         other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"),
     }
@@ -4683,7 +4723,7 @@ fn parse_dollar_quoted_string() {
                 quote_style: None,
                 span: Span::empty(),
             },
-        }
+        },
     );
 
     assert_eq!(
@@ -5256,10 +5296,14 @@ fn parse_at_time_zone() {
     // check precedence
     let expr = Expr::BinaryOp {
         left: Box::new(Expr::AtTimeZone {
-            timestamp: Box::new(Expr::TypedString {
+            timestamp: Box::new(Expr::TypedString(TypedString {
                 data_type: DataType::Timestamp(None, TimezoneInfo::None),
+                value: ValueWithSpan {
                     value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
-            }),
+                    span: Span::empty(),
+                },
+                uses_odbc_syntax: false,
+            })),
             time_zone: Box::new(Expr::Cast {
                 kind: CastKind::DoubleColon,
                 expr: Box::new(Expr::Value(
@@ -5288,6 +5332,44 @@ fn parse_at_time_zone() {
     );
 }
+
+#[test]
+fn parse_interval_data_type() {
+    pg_and_generic().verified_stmt("CREATE TABLE t (i INTERVAL)");
+    for p in 0..=6 {
+        pg_and_generic().verified_stmt(&format!("CREATE TABLE t (i INTERVAL({p}))"));
+        pg_and_generic().verified_stmt(&format!("SELECT '1 second'::INTERVAL({p})"));
+        pg_and_generic().verified_stmt(&format!("SELECT CAST('1 second' AS INTERVAL({p}))"));
+    }
+    let fields = [
+        "YEAR",
+        "MONTH",
+        "DAY",
+        "HOUR",
+        "MINUTE",
+        "SECOND",
+        "YEAR TO MONTH",
+        "DAY TO HOUR",
+        "DAY TO MINUTE",
+        "DAY TO SECOND",
+        "HOUR TO MINUTE",
+        "HOUR TO SECOND",
+        "MINUTE TO SECOND",
+    ];
+    for field in fields {
+        pg_and_generic().verified_stmt(&format!("CREATE TABLE t (i INTERVAL {field})"));
+        pg_and_generic().verified_stmt(&format!("SELECT '1 second'::INTERVAL {field}"));
+        pg_and_generic().verified_stmt(&format!("SELECT CAST('1 second' AS INTERVAL {field})"));
+    }
+    for p in 0..=6 {
+        for field in fields {
+            pg_and_generic().verified_stmt(&format!("CREATE TABLE t (i INTERVAL {field}({p}))"));
+            pg_and_generic().verified_stmt(&format!("SELECT '1 second'::INTERVAL {field}({p})"));
+            pg_and_generic()
+                .verified_stmt(&format!("SELECT CAST('1 second' AS INTERVAL {field}({p}))"));
+        }
+    }
+}
 
 #[test]
 fn parse_create_table_with_options() {
     let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)";
@@ -5679,7 +5761,7 @@ fn parse_drop_trigger() {
         "DROP TRIGGER{} check_update ON table_name{}",
         if if_exists { " IF EXISTS" } else { "" },
         option
-            .map(|o| format!(" {}", o))
+            .map(|o| format!(" {o}"))
             .unwrap_or_else(|| "".to_string())
     );
     assert_eq!(
@@ -5773,8 +5855,7 @@ fn parse_trigger_related_functions() {
     // Now we parse the statements and check if they are parsed correctly.
     let mut statements = pg()
         .parse_sql_statements(&format!(
-            "{}{}{}{}",
-            sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger
+            "{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}"
         ))
         .unwrap();
 
@@ -6201,3 +6282,153 @@ fn parse_alter_table_replica_identity() {
         _ => unreachable!(),
     }
 }
+
+#[test]
+fn parse_ts_datatypes() {
+    match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") {
+        Statement::CreateTable(CreateTable { columns, .. }) => {
+            assert_eq!(
+                columns,
+                vec![ColumnDef {
+                    name: "x".into(),
+                    data_type: DataType::TsVector,
+                    options: vec![],
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") {
+        Statement::CreateTable(CreateTable { columns, .. }) => {
+            assert_eq!(
+                columns,
+                vec![ColumnDef {
+                    name: "x".into(),
+                    data_type: DataType::TsQuery,
+                    options: vec![],
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_alter_table_constraint_not_valid() {
+    match pg_and_generic().verified_stmt(
+        "ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID",
+    ) {
+        Statement::AlterTable { operations, .. } => {
+            assert_eq!(
+                operations,
+                vec![AlterTableOperation::AddConstraint {
+                    constraint: TableConstraint::ForeignKey {
+                        name: Some("bar".into()),
+                        index_name: None,
+                        columns: vec!["baz".into()],
+                        foreign_table: ObjectName::from(vec!["other".into()]),
+                        referred_columns: vec!["ref".into()],
+                        on_delete: None,
+                        on_update: None,
+                        characteristics: None,
+                    },
+                    not_valid: true,
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_alter_table_validate_constraint() {
+    match pg_and_generic().verified_stmt("ALTER TABLE foo VALIDATE CONSTRAINT bar") {
+        Statement::AlterTable { operations, .. } => {
+            assert_eq!(
+                operations,
+                vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_create_server() {
+    let test_cases = vec![
+        (
+            "CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver".into()]),
+                if_not_exists: false,
+                server_type: None,
+                version: None,
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: None,
+            },
+        ),
+        (
+            "CREATE SERVER IF NOT EXISTS myserver TYPE 'server_type' VERSION 'server_version' FOREIGN DATA WRAPPER postgres_fdw",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver".into()]),
+                if_not_exists: true,
+                server_type: Some(Ident {
+                    value: "server_type".to_string(),
+                    quote_style: Some('\''),
+                    span: Span::empty(),
+                }),
+                version: Some(Ident {
+                    value: "server_version".to_string(),
+                    quote_style: Some('\''),
+                    span: Span::empty(),
+                }),
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: None,
+            }
+        ),
+        (
+            "CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver2".into()]),
+                if_not_exists: false,
+                server_type: None,
+                version: None,
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: Some(vec![
+                    CreateServerOption {
+                        key: "host".into(),
+                        value: Ident {
+                            value: "foo".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                    CreateServerOption {
+                        key: "dbname".into(),
+                        value: Ident {
+                            value: "foodb".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                    CreateServerOption {
+                        key: "port".into(),
+                        value: Ident {
+                            value: "5432".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                ]),
+            }
+        )
+    ];
+
+    for (sql, expected) in test_cases {
+        let Statement::CreateServer(stmt) = pg_and_generic().verified_stmt(sql) else {
+            unreachable!()
+        };
+        assert_eq!(stmt, expected);
+    }
+}
@@ -402,3 +402,8 @@ fn parse_extract_single_quotes() {
 fn parse_string_literal_backslash_escape() {
     redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'");
 }
+
+#[test]
+fn parse_utf8_multibyte_idents() {
+    redshift().verified_stmt("SELECT 🚀.city AS 🎸 FROM customers AS 🚀");
+}
@@ -270,8 +270,8 @@ fn test_snowflake_create_table_with_tag() {
             assert_eq!("my_table", name.to_string());
             assert_eq!(
                 Some(vec![
-                    Tag::new("A".into(), "TAG A".to_string()),
-                    Tag::new("B".into(), "TAG B".to_string())
+                    Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
+                    Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
                 ]),
                 with_tags
             );
@@ -291,8 +291,8 @@ fn test_snowflake_create_table_with_tag() {
             assert_eq!("my_table", name.to_string());
             assert_eq!(
                 Some(vec![
-                    Tag::new("A".into(), "TAG A".to_string()),
-                    Tag::new("B".into(), "TAG B".to_string())
+                    Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
+                    Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
                 ]),
                 with_tags
             );
@@ -471,15 +471,31 @@ fn test_snowflake_create_table_if_not_exists() {
 
 #[test]
 fn test_snowflake_create_table_cluster_by() {
-    match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b)") {
+    match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b, my_func(c))") {
         Statement::CreateTable(CreateTable {
             name, cluster_by, ..
         }) => {
             assert_eq!("my_table", name.to_string());
             assert_eq!(
                 Some(WrappedCollection::Parentheses(vec![
-                    Ident::new("a"),
-                    Ident::new("b"),
+                    Expr::Identifier(Ident::new("a")),
+                    Expr::Identifier(Ident::new("b")),
+                    Expr::Function(Function {
+                        name: ObjectName::from(vec![Ident::new("my_func")]),
+                        uses_odbc_syntax: false,
+                        parameters: FunctionArguments::None,
+                        args: FunctionArguments::List(FunctionArgumentList {
+                            args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(
+                                Expr::Identifier(Ident::new("c"))
+                            ))],
+                            duplicate_treatment: None,
+                            clauses: vec![],
+                        }),
+                        filter: None,
+                        null_treatment: None,
+                        over: None,
+                        within_group: vec![],
+                    }),
                 ])),
                 cluster_by
             )
@@ -715,7 +731,7 @@ fn test_snowflake_create_table_with_columns_masking_policy() {
                     option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
                         ColumnPolicyProperty {
                             with,
-                            policy_name: "p".into(),
+                            policy_name: ObjectName::from(vec![Ident::new("p")]),
                             using_columns,
                         }
                     ))
@@ -749,7 +765,7 @@ fn test_snowflake_create_table_with_columns_projection_policy() {
                     option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
                         ColumnPolicyProperty {
                             with,
-                            policy_name: "p".into(),
+                            policy_name: ObjectName::from(vec![Ident::new("p")]),
                             using_columns: None,
                         }
                    ))
@@ -786,8 +802,14 @@ fn test_snowflake_create_table_with_columns_tags() {
                     option: ColumnOption::Tags(TagsColumnOption {
                         with,
                         tags: vec![
-                            Tag::new("A".into(), "TAG A".into()),
-                            Tag::new("B".into(), "TAG B".into()),
+                            Tag::new(
+                                ObjectName::from(vec![Ident::new("A")]),
+                                "TAG A".into()
+                            ),
+                            Tag::new(
+                                ObjectName::from(vec![Ident::new("B")]),
+                                "TAG B".into()
+                            ),
                         ]
                     }),
                 }],
@@ -830,7 +852,7 @@ fn test_snowflake_create_table_with_several_column_options() {
                         option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
                             ColumnPolicyProperty {
                                 with: true,
-                                policy_name: "p1".into(),
+                                policy_name: ObjectName::from(vec![Ident::new("p1")]),
                                 using_columns: Some(vec!["a".into(), "b".into()]),
                             }
                         )),
@@ -840,8 +862,14 @@ fn test_snowflake_create_table_with_several_column_options() {
                         option: ColumnOption::Tags(TagsColumnOption {
                             with: true,
                             tags: vec![
-                                Tag::new("A".into(), "TAG A".into()),
-                                Tag::new("B".into(), "TAG B".into()),
+                                Tag::new(
+                                    ObjectName::from(vec![Ident::new("A")]),
+                                    "TAG A".into()
+                                ),
+                                Tag::new(
+                                    ObjectName::from(vec![Ident::new("B")]),
+                                    "TAG B".into()
+                                ),
                             ]
                         }),
                     }
@@ -862,7 +890,7 @@ fn test_snowflake_create_table_with_several_column_options() {
                         option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
                             ColumnPolicyProperty {
                                 with: false,
-                                policy_name: "p2".into(),
+                                policy_name: ObjectName::from(vec![Ident::new("p2")]),
                                 using_columns: None,
                             }
                         )),
@@ -872,8 +900,14 @@ fn test_snowflake_create_table_with_several_column_options() {
                         option: ColumnOption::Tags(TagsColumnOption {
                             with: false,
                             tags: vec![
-                                Tag::new("C".into(), "TAG C".into()),
-                                Tag::new("D".into(), "TAG D".into()),
+                                Tag::new(
+                                    ObjectName::from(vec![Ident::new("C")]),
+                                    "TAG C".into()
+                                ),
+                                Tag::new(
+                                    ObjectName::from(vec![Ident::new("D")]),
+                                    "TAG D".into()
+                                ),
                             ]
                         }),
                     }
@@ -903,8 +937,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
             assert_eq!("my_table", name.to_string());
             assert_eq!(
                 Some(WrappedCollection::Parentheses(vec![
-                    Ident::new("a"),
-                    Ident::new("b"),
+                    Expr::Identifier(Ident::new("a")),
+                    Expr::Identifier(Ident::new("b")),
                 ])),
                 cluster_by
             );
@@ -926,8 +960,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
                 with_aggregation_policy.map(|name| name.to_string())
             );
             assert_eq!(Some(vec![
-                Tag::new("A".into(), "TAG A".into()),
-                Tag::new("B".into(), "TAG B".into()),
+                Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".into()),
+                Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".into()),
             ]), with_tags);
 
         }
@@ -961,6 +995,51 @@ fn test_snowflake_create_iceberg_table_without_location() {
     );
 }
 
+#[test]
+fn test_snowflake_create_table_trailing_options() {
+    // Serialization to SQL assume that in `CREATE TABLE AS` the options come before the `AS (<query>)`
+    // but Snowflake supports also the other way around
+    snowflake()
+        .verified_stmt("CREATE TEMPORARY TABLE dst ON COMMIT PRESERVE ROWS AS (SELECT * FROM src)");
+    snowflake()
+        .parse_sql_statements(
+            "CREATE TEMPORARY TABLE dst AS (SELECT * FROM src) ON COMMIT PRESERVE ROWS",
+        )
+        .unwrap();
+
+    // Same for `CREATE TABLE LIKE|CLONE`:
+    snowflake().verified_stmt("CREATE TEMPORARY TABLE dst LIKE src ON COMMIT PRESERVE ROWS");
+    snowflake()
+        .parse_sql_statements("CREATE TEMPORARY TABLE dst ON COMMIT PRESERVE ROWS LIKE src")
+        .unwrap();
+
+    snowflake().verified_stmt("CREATE TEMPORARY TABLE dst CLONE src ON COMMIT PRESERVE ROWS");
+    snowflake()
+        .parse_sql_statements("CREATE TEMPORARY TABLE dst ON COMMIT PRESERVE ROWS CLONE src")
+        .unwrap();
+}
+
+#[test]
+fn test_snowflake_create_table_valid_schema_info() {
+    // Validate there's exactly one source of information on the schema of the new table
+    assert_eq!(
+        snowflake()
+            .parse_sql_statements("CREATE TABLE dst")
+            .is_err(),
+        true
+    );
+    assert_eq!(
+        snowflake().parse_sql_statements("CREATE OR REPLACE TEMP TABLE dst LIKE src AS (SELECT * FROM CUSTOMERS) ON COMMIT PRESERVE ROWS").is_err(),
+        true
+    );
+    assert_eq!(
+        snowflake()
+            .parse_sql_statements("CREATE OR REPLACE TEMP TABLE dst CLONE customers LIKE customer2")
+            .is_err(),
+        true
+    );
+}
+
 #[test]
 fn parse_sf_create_or_replace_view_with_comment_missing_equal() {
     assert!(snowflake_and_generic()
@@ -2494,10 +2573,7 @@ fn test_snowflake_stage_object_names_into_location() {
         .zip(allowed_object_names.iter_mut())
     {
         let (formatted_name, object_name) = it;
-        let sql = format!(
-            "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
-            formatted_name
-        );
+        let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
         match snowflake().verified_stmt(&sql) {
             Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
@@ -2520,10 +2596,7 @@ fn test_snowflake_stage_object_names_into_table() {
         .zip(allowed_object_names.iter_mut())
     {
         let (formatted_name, object_name) = it;
-        let sql = format!(
-            "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
-            formatted_name
-        );
+        let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
         match snowflake().verified_stmt(&sql) {
             Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
@@ -2553,6 +2626,26 @@ fn test_snowflake_copy_into() {
         }
         _ => unreachable!(),
     }
+
+    // Test for non-ident characters in stage names
+    let sql = "COPY INTO a.b FROM @namespace.stage_name/x@x~x%x+/20250723_data";
+    assert_eq!(snowflake().verified_stmt(sql).to_string(), sql);
+    match snowflake().verified_stmt(sql) {
+        Statement::CopyIntoSnowflake { into, from_obj, .. } => {
+            assert_eq!(
+                into,
+                ObjectName::from(vec![Ident::new("a"), Ident::new("b")])
+            );
+            assert_eq!(
+                from_obj,
+                Some(ObjectName::from(vec![
+                    Ident::new("@namespace"),
+                    Ident::new("stage_name/x@x~x%x+/20250723_data")
+                ]))
+            )
+        }
+        _ => unreachable!(),
+    }
 }
 
 #[test]
@@ -3004,7 +3097,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {}", object_name)),
+            snowflake().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -3012,7 +3105,7 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test single identifier with different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+            snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 object_name.to_string(),
@@ -3024,7 +3117,9 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test double identifier with different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!(
+                "USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
+            )),
             Statement::Use(Use::Object(ObjectName::from(vec![
                 Ident::with_quote(quote, "CATALOG"),
                 Ident::with_quote(quote, "my_schema")
@@ -3043,35 +3138,37 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test single and double identifier with keyword and different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
+            snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
             Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_database".to_string(),
             )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
             Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_schema".to_string(),
             )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!(
+                "USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}"
+            )),
             Statement::Use(Use::Schema(ObjectName::from(vec![
                 Ident::with_quote(quote, "CATALOG"),
                 Ident::with_quote(quote, "my_schema")
             ])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)),
+            snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")),
             Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_role".to_string(),
            )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)),
+            snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")),
             Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_wh".to_string(),
@@ -3108,7 +3205,7 @@ fn view_comment_option_should_be_after_column_list() {
         "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t",
         "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t",
     ] {
-        snowflake_and_generic()
+        snowflake()
             .verified_stmt(sql);
     }
 }
@@ -3117,7 +3214,7 @@ fn view_comment_option_should_be_after_column_list() {
 fn parse_view_column_descriptions() {
     let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1";
 
-    match snowflake_and_generic().verified_stmt(sql) {
+    match snowflake().verified_stmt(sql) {
         Statement::CreateView { name, columns, .. } => {
             assert_eq!(name.to_string(), "v");
             assert_eq!(
@@ -3126,7 +3223,9 @@ fn parse_view_column_descriptions() {
                 ViewColumnDef {
                     name: Ident::new("a"),
                     data_type: None,
-                    options: Some(vec![ColumnOption::Comment("Comment".to_string())]),
+                    options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
+                        "Comment".to_string()
+                    )])),
                 },
                 ViewColumnDef {
                     name: Ident::new("b"),
@@ -3381,10 +3480,38 @@ fn parse_ls_and_rm() {
         .unwrap();
 }
+
+#[test]
+fn test_sql_keywords_as_select_item_ident() {
+    // Some keywords that should be parsed as an alias
+    let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
+    for kw in unreserved_kws {
+        snowflake().verified_stmt(&format!("SELECT 1, {kw}"));
+    }
+
+    // Some keywords that should not be parsed as an alias
+    let reserved_kws = vec![
+        "FROM",
+        "GROUP",
+        "HAVING",
+        "INTERSECT",
+        "INTO",
+        "ORDER",
+        "SELECT",
+        "UNION",
+        "WHERE",
+        "WITH",
+    ];
+    for kw in reserved_kws {
+        assert!(snowflake()
+            .parse_sql_statements(&format!("SELECT 1, {kw}"))
+            .is_err());
+    }
+}
 
#[test]
|
#[test]
|
||||||
fn test_sql_keywords_as_select_item_aliases() {
|
fn test_sql_keywords_as_select_item_aliases() {
|
||||||
// Some keywords that should be parsed as an alias
|
// Some keywords that should be parsed as an alias
|
||||||
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT"];
|
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
|
||||||
for kw in unreserved_kws {
|
for kw in unreserved_kws {
|
||||||
snowflake()
|
snowflake()
|
||||||
.one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}"));
|
.one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}"));
|
||||||
|
@ -3408,6 +3535,87 @@ fn test_sql_keywords_as_select_item_aliases() {
|
||||||
.parse_sql_statements(&format!("SELECT 1 {kw}"))
|
.parse_sql_statements(&format!("SELECT 1 {kw}"))
|
||||||
.is_err());
|
.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// LIMIT is alias
|
||||||
|
snowflake().one_statement_parses_to("SELECT 1 LIMIT", "SELECT 1 AS LIMIT");
|
||||||
|
// LIMIT is not an alias
|
||||||
|
snowflake().verified_stmt("SELECT 1 LIMIT 1");
|
||||||
|
snowflake().verified_stmt("SELECT 1 LIMIT $1");
|
||||||
|
snowflake().verified_stmt("SELECT 1 LIMIT ''");
|
||||||
|
snowflake().verified_stmt("SELECT 1 LIMIT NULL");
|
||||||
|
snowflake().verified_stmt("SELECT 1 LIMIT $$$$");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sql_keywords_as_table_aliases() {
|
||||||
|
// Some keywords that should be parsed as an alias implicitly
|
||||||
|
let unreserved_kws = vec![
|
||||||
|
"VIEW",
|
||||||
|
"EXPLAIN",
|
||||||
|
"ANALYZE",
|
||||||
|
"SORT",
|
||||||
|
"PIVOT",
|
||||||
|
"UNPIVOT",
|
||||||
|
"TOP",
|
||||||
|
"LIMIT",
|
||||||
|
"OFFSET",
|
||||||
|
"FETCH",
|
||||||
|
"EXCEPT",
|
||||||
|
"CLUSTER",
|
||||||
|
"DISTRIBUTE",
|
||||||
|
"GLOBAL",
|
||||||
|
"ANTI",
|
||||||
|
"SEMI",
|
||||||
|
"RETURNING",
|
||||||
|
"OUTER",
|
||||||
|
"WINDOW",
|
||||||
|
"END",
|
||||||
|
"PARTITION",
|
||||||
|
"PREWHERE",
|
||||||
|
"SETTINGS",
|
||||||
|
"FORMAT",
|
||||||
|
"MATCH_RECOGNIZE",
|
||||||
|
"OPEN",
|
||||||
|
];
|
||||||
|
|
||||||
|
for kw in unreserved_kws {
|
||||||
|
snowflake().verified_stmt(&format!("SELECT * FROM tbl AS {kw}"));
|
||||||
|
snowflake().one_statement_parses_to(
|
||||||
|
&format!("SELECT * FROM tbl {kw}"),
|
||||||
|
&format!("SELECT * FROM tbl AS {kw}"),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Some keywords that should not be parsed as an alias implicitly
|
||||||
|
let reserved_kws = vec![
|
||||||
|
"FROM", "GROUP", "HAVING", "ORDER", "SELECT", "UNION", "WHERE", "WITH",
|
||||||
|
];
|
||||||
|
for kw in reserved_kws {
|
||||||
|
assert!(snowflake()
|
||||||
|
.parse_sql_statements(&format!("SELECT * FROM tbl {kw}"))
|
||||||
|
.is_err());
|
||||||
|
}
|
||||||
|
|
||||||
|
// LIMIT is alias
|
||||||
|
snowflake().one_statement_parses_to("SELECT * FROM tbl LIMIT", "SELECT * FROM tbl AS LIMIT");
|
||||||
|
// LIMIT is not an alias
|
||||||
|
snowflake().verified_stmt("SELECT * FROM tbl LIMIT 1");
|
||||||
|
snowflake().verified_stmt("SELECT * FROM tbl LIMIT $1");
|
||||||
|
snowflake().verified_stmt("SELECT * FROM tbl LIMIT ''");
|
||||||
|
snowflake().verified_stmt("SELECT * FROM tbl LIMIT NULL");
|
||||||
|
snowflake().verified_stmt("SELECT * FROM tbl LIMIT $$$$");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_sql_keywords_as_table_factor() {
|
||||||
|
// LIMIT is a table factor, Snowflake does not reserve it
|
||||||
|
snowflake().one_statement_parses_to("SELECT * FROM tbl, LIMIT", "SELECT * FROM tbl, LIMIT");
|
||||||
|
// LIMIT is not a table factor
|
||||||
|
snowflake().one_statement_parses_to("SELECT * FROM tbl, LIMIT 1", "SELECT * FROM tbl LIMIT 1");
|
||||||
|
// ORDER is reserved
|
||||||
|
assert!(snowflake()
|
||||||
|
.parse_sql_statements("SELECT * FROM tbl, order")
|
||||||
|
.is_err());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -3611,7 +3819,7 @@ fn test_alter_session_followed_by_statement() {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
match stmts[..] {
|
match stmts[..] {
|
||||||
[Statement::AlterSession { .. }, Statement::Query { .. }] => {}
|
[Statement::AlterSession { .. }, Statement::Query { .. }] => {}
|
||||||
_ => panic!("Unexpected statements: {:?}", stmts),
|
_ => panic!("Unexpected statements: {stmts:?}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4066,3 +4274,296 @@ fn parse_connect_by_root_operator() {
|
||||||
"sql parser error: Expected an expression, found: FROM"
|
"sql parser error: Expected an expression, found: FROM"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_begin_exception_end() {
|
||||||
|
for sql in [
|
||||||
|
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END",
|
||||||
|
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END",
|
||||||
|
"BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END",
|
||||||
|
"BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END",
|
||||||
|
] {
|
||||||
|
snowflake().verified_stmt(sql);
|
||||||
|
}
|
||||||
|
|
||||||
|
let sql = r#"
|
||||||
|
DECLARE
|
||||||
|
EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.');
|
||||||
|
EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!');
|
||||||
|
EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...');
|
||||||
|
BEGIN
|
||||||
|
BEGIN
|
||||||
|
SELECT 1;
|
||||||
|
EXCEPTION
|
||||||
|
WHEN EXCEPTION_1 THEN
|
||||||
|
SELECT 1;
|
||||||
|
WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
|
||||||
|
SELECT 2;
|
||||||
|
SELECT 3;
|
||||||
|
WHEN OTHER THEN
|
||||||
|
SELECT 4;
|
||||||
|
RAISE;
|
||||||
|
END;
|
||||||
|
END
|
||||||
|
"#;
|
||||||
|
|
||||||
|
// Outer `BEGIN` of the two nested `BEGIN` statements.
|
||||||
|
let Statement::StartTransaction { mut statements, .. } = snowflake()
|
||||||
|
.parse_sql_statements(sql)
|
||||||
|
.unwrap()
|
||||||
|
.pop()
|
||||||
|
.unwrap()
|
||||||
|
else {
|
||||||
|
unreachable!();
|
||||||
|
};
|
||||||
|
|
||||||
|
// Inner `BEGIN` of the two nested `BEGIN` statements.
|
||||||
|
let Statement::StartTransaction {
|
||||||
|
statements,
|
||||||
|
exception,
|
||||||
|
has_end_keyword,
|
||||||
|
..
|
||||||
|
} = statements.pop().unwrap()
|
||||||
|
else {
|
||||||
|
unreachable!();
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(1, statements.len());
|
||||||
|
assert!(has_end_keyword);
|
||||||
|
|
||||||
|
let exception = exception.unwrap();
|
||||||
|
assert_eq!(3, exception.len());
|
||||||
|
assert_eq!(1, exception[0].idents.len());
|
||||||
|
assert_eq!(1, exception[0].statements.len());
|
||||||
|
assert_eq!(2, exception[1].idents.len());
|
||||||
|
assert_eq!(2, exception[1].statements.len());
|
||||||
|
}
|
||||||
|
|
||||||
|
+#[test]
+fn test_snowflake_fetch_clause_syntax() {
+    let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
+    snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical);
+
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical);
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical);
+
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical);
+
+    snowflake().verified_only_select_with_canonical(
+        "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS",
+        canonical,
+    );
+}
+
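
All of the abbreviated FETCH spellings above normalize to one canonical clause; a minimal sketch of that normalization outside the test harness:

use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
    for sql in [
        "SELECT c1 FROM fetch_test FETCH 2",
        "SELECT c1 FROM fetch_test FETCH NEXT 2",
        "SELECT c1 FROM fetch_test FETCH 2 ROW",
    ] {
        // Each abbreviated FETCH spelling serializes back as the canonical clause.
        let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
        assert_eq!(stmts[0].to_string(), canonical);
    }
}
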
+#[test]
+fn test_snowflake_create_view_with_multiple_column_options() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
+
+#[test]
+fn test_snowflake_create_view_with_composite_tag() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH TAG (foo.bar.baz.pii='email')) AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
+
+#[test]
+fn test_snowflake_create_view_with_composite_policy_name() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
+
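
`verified_stmt` asserts an exact parse/serialize round-trip; the same check for the new view column options, written as a sketch against the public API:

use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    // The tag and comment column options survive the round-trip unchanged.
    assert_eq!(stmts[0].to_string(), sql);
}
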
+#[test]
+fn test_snowflake_identifier_function() {
+    // Using IDENTIFIER to reference a column
+    match &snowflake()
+        .verified_only_select("SELECT identifier('email') FROM customers")
+        .projection[0]
+    {
+        SelectItem::UnnamedExpr(Expr::Function(Function { name, args, .. })) => {
+            assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")]));
+            assert_eq!(
+                *args,
+                FunctionArguments::List(FunctionArgumentList {
+                    args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                        Value::SingleQuotedString("email".to_string()).into()
+                    )))],
+                    clauses: vec![],
+                    duplicate_treatment: None
+                })
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Using IDENTIFIER to reference a case-sensitive column
+    match &snowflake()
+        .verified_only_select(r#"SELECT identifier('"Email"') FROM customers"#)
+        .projection[0]
+    {
+        SelectItem::UnnamedExpr(Expr::Function(Function { name, args, .. })) => {
+            assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")]));
+            assert_eq!(
+                *args,
+                FunctionArguments::List(FunctionArgumentList {
+                    args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                        Value::SingleQuotedString("\"Email\"".to_string()).into()
+                    )))],
+                    clauses: vec![],
+                    duplicate_treatment: None
+                })
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Using IDENTIFIER to reference an alias of a table
+    match &snowflake()
+        .verified_only_select("SELECT identifier('alias1').* FROM tbl AS alias1")
+        .projection[0]
+    {
+        SelectItem::QualifiedWildcard(
+            SelectItemQualifiedWildcardKind::Expr(Expr::Function(Function { name, args, .. })),
+            _,
+        ) => {
+            assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")]));
+            assert_eq!(
+                *args,
+                FunctionArguments::List(FunctionArgumentList {
+                    args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                        Value::SingleQuotedString("alias1".to_string()).into()
+                    )))],
+                    clauses: vec![],
+                    duplicate_treatment: None
+                })
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Using IDENTIFIER to reference a database
+    match snowflake().verified_stmt("CREATE DATABASE IDENTIFIER('tbl')") {
+        Statement::CreateDatabase { db_name, .. } => {
+            assert_eq!(
+                db_name,
+                ObjectName(vec![ObjectNamePart::Function(ObjectNamePartFunction {
+                    name: Ident::new("IDENTIFIER"),
+                    args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                        Value::SingleQuotedString("tbl".to_string()).into()
+                    )))]
+                })])
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Using IDENTIFIER to reference a schema
+    match snowflake().verified_stmt("CREATE SCHEMA IDENTIFIER('db1.sc1')") {
+        Statement::CreateSchema { schema_name, .. } => {
+            assert_eq!(
+                schema_name,
+                SchemaName::Simple(ObjectName(vec![ObjectNamePart::Function(
+                    ObjectNamePartFunction {
+                        name: Ident::new("IDENTIFIER"),
+                        args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                            Value::SingleQuotedString("db1.sc1".to_string()).into()
+                        )))]
+                    }
+                )]))
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Using IDENTIFIER to reference a table
+    match snowflake().verified_stmt("CREATE TABLE IDENTIFIER('tbl') (id INT)") {
+        Statement::CreateTable(CreateTable { name, .. }) => {
+            assert_eq!(
+                name,
+                ObjectName(vec![ObjectNamePart::Function(ObjectNamePartFunction {
+                    name: Ident::new("IDENTIFIER"),
+                    args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(
+                        Value::SingleQuotedString("tbl".to_string()).into()
+                    )))]
+                })])
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    // Cannot have more than one IDENTIFIER part in an object name
+    assert_eq!(
+        snowflake()
+            .parse_sql_statements(
+                "CREATE TABLE IDENTIFIER('db1').IDENTIFIER('sc1').IDENTIFIER('tbl') (id INT)"
+            )
+            .is_err(),
+        true
+    );
+    assert_eq!(
+        snowflake()
+            .parse_sql_statements("CREATE TABLE IDENTIFIER('db1')..IDENTIFIER('tbl') (id INT)")
+            .is_err(),
+        true
+    );
+
+    snowflake().verified_stmt("GRANT ROLE IDENTIFIER('AAA') TO USER IDENTIFIER('AAA')");
+    snowflake().verified_stmt("REVOKE ROLE IDENTIFIER('AAA') FROM USER IDENTIFIER('AAA')");
+}
+
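
A sketch (not part of the diff) of inspecting the function-shaped name part that `IDENTIFIER(...)` produces, using the `ObjectNamePart::Function` variant the diff constructs:

use sqlparser::ast::{ObjectNamePart, Statement};
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TABLE IDENTIFIER('tbl') (id INT)";
    match Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap().pop().unwrap() {
        Statement::CreateTable(create) => match &create.name.0[0] {
            // IDENTIFIER(...) is preserved as a function-shaped name part
            // instead of being folded into a plain identifier.
            ObjectNamePart::Function(f) => assert_eq!(f.name.value, "IDENTIFIER"),
            other => panic!("expected a function name part, got {other:?}"),
        },
        other => panic!("expected CreateTable, got {other:?}"),
    }
}
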
+#[test]
+fn test_create_database() {
+    snowflake().verified_stmt("CREATE DATABASE my_db");
+    snowflake().verified_stmt("CREATE OR REPLACE DATABASE my_db");
+    snowflake().verified_stmt("CREATE TRANSIENT DATABASE IF NOT EXISTS my_db");
+    snowflake().verified_stmt("CREATE DATABASE my_db CLONE src_db");
+    snowflake().verified_stmt(
+        "CREATE OR REPLACE DATABASE my_db CLONE src_db DATA_RETENTION_TIME_IN_DAYS = 1",
+    );
+    snowflake().one_statement_parses_to(
+        r#"
+        CREATE OR REPLACE TRANSIENT DATABASE IF NOT EXISTS my_db
+        CLONE src_db
+        DATA_RETENTION_TIME_IN_DAYS = 1
+        MAX_DATA_EXTENSION_TIME_IN_DAYS = 5
+        EXTERNAL_VOLUME = 'volume1'
+        CATALOG = 'my_catalog'
+        REPLACE_INVALID_CHARACTERS = TRUE
+        DEFAULT_DDL_COLLATION = 'en-ci'
+        STORAGE_SERIALIZATION_POLICY = COMPATIBLE
+        COMMENT = 'This is my database'
+        CATALOG_SYNC = 'sync_integration'
+        CATALOG_SYNC_NAMESPACE_MODE = NEST
+        CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER = '/'
+        WITH TAG (env = 'prod', team = 'data')
+        WITH CONTACT (owner = 'admin', dpo = 'compliance')
+        "#,
+        "CREATE OR REPLACE TRANSIENT DATABASE IF NOT EXISTS \
+        my_db CLONE src_db DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 5 \
+        EXTERNAL_VOLUME = 'volume1' CATALOG = 'my_catalog' \
+        REPLACE_INVALID_CHARACTERS = TRUE DEFAULT_DDL_COLLATION = 'en-ci' \
+        STORAGE_SERIALIZATION_POLICY = COMPATIBLE COMMENT = 'This is my database' \
+        CATALOG_SYNC = 'sync_integration' CATALOG_SYNC_NAMESPACE_MODE = NEST \
+        CATALOG_SYNC_NAMESPACE_FLATTEN_DELIMITER = '/' \
+        WITH TAG (env='prod', team='data') \
+        WITH CONTACT (owner = admin, dpo = compliance)",
+    );
+
+    let err = snowflake()
+        .parse_sql_statements("CREATE DATABASE")
+        .unwrap_err()
+        .to_string();
+    assert!(err.contains("Expected"), "Unexpected error: {err}");
+
+    let err = snowflake()
+        .parse_sql_statements("CREATE DATABASE my_db CLONE")
+        .unwrap_err()
+        .to_string();
+    assert!(err.contains("Expected"), "Unexpected error: {err}");
+}
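
A sketch of the simplest new CREATE DATABASE path; the `if_not_exists` field is an assumption about `Statement::CreateDatabase` beyond the `db_name` field the diff matches on:

use sqlparser::ast::Statement;
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TRANSIENT DATABASE IF NOT EXISTS my_db";
    match Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap().pop().unwrap() {
        // if_not_exists is assumed to exist on this variant; db_name is shown in the diff.
        Statement::CreateDatabase { db_name, if_not_exists, .. } => {
            assert_eq!(db_name.to_string(), "my_db");
            assert!(if_not_exists);
        }
        other => panic!("expected CreateDatabase, got {other:?}"),
    }
}
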
@@ -324,7 +324,7 @@ fn parse_create_table_on_conflict_col() {
         Keyword::IGNORE,
         Keyword::REPLACE,
     ] {
-        let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword);
+        let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})");
         match sqlite_and_generic().verified_stmt(&sql) {
             Statement::CreateTable(CreateTable { columns, .. }) => {
                 assert_eq!(
@@ -410,7 +410,7 @@ fn parse_window_function_with_filter() {
         "count",
         "user_defined_function",
     ] {
-        let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name);
+        let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t");
         let select = sqlite().verified_only_select(&sql);
         assert_eq!(select.to_string(), sql);
         assert_eq!(
@@ -444,7 +444,7 @@ fn parse_window_function_with_filter() {
 fn parse_attach_database() {
     let sql = "ATTACH DATABASE 'test.db' AS test";
     let verified_stmt = sqlite().verified_stmt(sql);
-    assert_eq!(sql, format!("{}", verified_stmt));
+    assert_eq!(sql, format!("{verified_stmt}"));
     match verified_stmt {
         Statement::AttachDatabase {
             schema_name,
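
For context, `parse_attach_database` checks a plain round-trip; a standalone sketch with the SQLite dialect (not part of the diff):

use sqlparser::ast::Statement;
use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ATTACH DATABASE 'test.db' AS test";
    let stmt = Parser::parse_sql(&SQLiteDialect {}, sql).unwrap().pop().unwrap();
    // The Display output round-trips to the original SQL text.
    assert_eq!(stmt.to_string(), sql);
    if let Statement::AttachDatabase { schema_name, .. } = stmt {
        assert_eq!(schema_name.to_string(), "test");
    }
}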