Mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-07-07 17:04:59 +00:00)

Commit e064650466: Merge remote-tracking branch 'origin' into support-more-pipe-operators

40 changed files with 1912 additions and 606 deletions

.github/workflows/license.yml | 39 (new file, vendored)

@@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: license

# trigger for all PRs and changes to main
on:
  push:
    branches:
      - main
  pull_request:

jobs:

  rat:
    name: Release Audit Tool (RAT)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.8
      - name: Audit licenses
        run: ./dev/release/run-rat.sh .

.github/workflows/rust.yml | 10 (vendored)

@@ -19,6 +19,9 @@ name: Rust

on: [push, pull_request]

permissions:
  contents: read

jobs:

  codestyle:

@@ -85,11 +88,8 @@ jobs:
        uses: ./.github/actions/setup-builder
        with:
          rust-version: ${{ matrix.rust }}
      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
      - name: Install Tarpaulin
        uses: actions-rs/install@v0.1
        with:
          crate: cargo-tarpaulin
          version: 0.14.2
          use-tool-cache: true
        run: cargo install cargo-tarpaulin
      - name: Test
        run: cargo test --all-features

Cargo.toml

@@ -18,7 +18,7 @@
[package]
name = "sqlparser"
description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
version = "0.56.0"
version = "0.57.0"
authors = ["Apache DataFusion <dev@datafusion.apache.org>"]
homepage = "https://github.com/apache/datafusion-sqlparser-rs"
documentation = "https://docs.rs/sqlparser/"

changelog/0.57.0.md | 95 (new file)

@@ -0,0 +1,95 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->

# sqlparser-rs 0.57.0 Changelog

This release consists of 39 commits from 19 contributors. See credits at the end of this changelog for more information.

**Implemented enhancements:**

- feat: Hive: support `SORT BY` direction [#1873](https://github.com/apache/datafusion-sqlparser-rs/pull/1873) (chenkovsky)

**Other:**

- Support some of pipe operators [#1759](https://github.com/apache/datafusion-sqlparser-rs/pull/1759) (simonvandel)
- Added support for `DROP DOMAIN` [#1828](https://github.com/apache/datafusion-sqlparser-rs/pull/1828) (LucaCappelletti94)
- Improve support for cursors for SQL Server [#1831](https://github.com/apache/datafusion-sqlparser-rs/pull/1831) (aharpervc)
- Add all missing table options to be handled in any order [#1747](https://github.com/apache/datafusion-sqlparser-rs/pull/1747) (benrsatori)
- Add `CREATE TRIGGER` support for SQL Server [#1810](https://github.com/apache/datafusion-sqlparser-rs/pull/1810) (aharpervc)
- Added support for `CREATE DOMAIN` [#1830](https://github.com/apache/datafusion-sqlparser-rs/pull/1830) (LucaCappelletti94)
- Allow stored procedures to be defined without `BEGIN`/`END` [#1834](https://github.com/apache/datafusion-sqlparser-rs/pull/1834) (aharpervc)
- Add support for the MATCH and REGEXP binary operators [#1840](https://github.com/apache/datafusion-sqlparser-rs/pull/1840) (lovasoa)
- Fix: parsing ident starting with underscore in certain dialects [#1835](https://github.com/apache/datafusion-sqlparser-rs/pull/1835) (MohamedAbdeen21)
- implement pretty-printing with `{:#}` [#1847](https://github.com/apache/datafusion-sqlparser-rs/pull/1847) (lovasoa)
- Fix big performance issue in string serialization [#1848](https://github.com/apache/datafusion-sqlparser-rs/pull/1848) (lovasoa)
- Add support for `DENY` statements [#1836](https://github.com/apache/datafusion-sqlparser-rs/pull/1836) (aharpervc)
- Postgresql: Add `REPLICA IDENTITY` operation for `ALTER TABLE` [#1844](https://github.com/apache/datafusion-sqlparser-rs/pull/1844) (MohamedAbdeen21)
- Add support for INCLUDE/EXCLUDE NULLS for UNPIVOT [#1849](https://github.com/apache/datafusion-sqlparser-rs/pull/1849) (Vedin)
- pretty print improvements [#1851](https://github.com/apache/datafusion-sqlparser-rs/pull/1851) (lovasoa)
- fix new rust 1.87 cargo clippy warnings [#1856](https://github.com/apache/datafusion-sqlparser-rs/pull/1856) (lovasoa)
- Update criterion requirement from 0.5 to 0.6 in /sqlparser_bench [#1857](https://github.com/apache/datafusion-sqlparser-rs/pull/1857) (dependabot[bot])
- pretty-print CREATE TABLE statements [#1854](https://github.com/apache/datafusion-sqlparser-rs/pull/1854) (lovasoa)
- pretty-print CREATE VIEW statements [#1855](https://github.com/apache/datafusion-sqlparser-rs/pull/1855) (lovasoa)
- Handle optional datatypes properly in `CREATE FUNCTION` statements [#1826](https://github.com/apache/datafusion-sqlparser-rs/pull/1826) (LucaCappelletti94)
- Mysql: Add `SRID` column option [#1852](https://github.com/apache/datafusion-sqlparser-rs/pull/1852) (MohamedAbdeen21)
- Add support for table valued functions for SQL Server [#1839](https://github.com/apache/datafusion-sqlparser-rs/pull/1839) (aharpervc)
- Keep the COLUMN keyword only if it exists when dropping the column [#1862](https://github.com/apache/datafusion-sqlparser-rs/pull/1862) (git-hulk)
- Add support for parameter default values in SQL Server [#1866](https://github.com/apache/datafusion-sqlparser-rs/pull/1866) (aharpervc)
- Add support for `TABLESAMPLE` pipe operator [#1860](https://github.com/apache/datafusion-sqlparser-rs/pull/1860) (hendrikmakait)
- Adds support for mysql's drop index [#1864](https://github.com/apache/datafusion-sqlparser-rs/pull/1864) (dmzmk)
- Fix: GROUPING SETS accept values without parenthesis [#1867](https://github.com/apache/datafusion-sqlparser-rs/pull/1867) (Vedin)
- Add ICEBERG keyword support to ALTER TABLE statement [#1869](https://github.com/apache/datafusion-sqlparser-rs/pull/1869) (osipovartem)
- MySQL: Support `index_name` in FK constraints [#1871](https://github.com/apache/datafusion-sqlparser-rs/pull/1871) (MohamedAbdeen21)
- Postgres: Apply `ONLY` keyword per table in TRUNCATE stmt [#1872](https://github.com/apache/datafusion-sqlparser-rs/pull/1872) (MohamedAbdeen21)
- Fix `CASE` expression spans [#1874](https://github.com/apache/datafusion-sqlparser-rs/pull/1874) (eliaperantoni)
- MySQL: `[[NOT] ENFORCED]` in CHECK constraint [#1870](https://github.com/apache/datafusion-sqlparser-rs/pull/1870) (MohamedAbdeen21)
- Add support for `CREATE SCHEMA WITH ( <properties> )` [#1877](https://github.com/apache/datafusion-sqlparser-rs/pull/1877) (utay)
- Add support for `ALTER TABLE DROP INDEX` [#1865](https://github.com/apache/datafusion-sqlparser-rs/pull/1865) (vimko)
- chore: Replace archived actions-rs/install action [#1876](https://github.com/apache/datafusion-sqlparser-rs/pull/1876) (assignUser)
- Allow `IF NOT EXISTS` after table name for Snowflake [#1881](https://github.com/apache/datafusion-sqlparser-rs/pull/1881) (bombsimon)
- Support `DISTINCT AS { STRUCT | VALUE }` for BigQuery [#1880](https://github.com/apache/datafusion-sqlparser-rs/pull/1880) (bombsimon)

## Credits

Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.

```
    7  Ophir LOJKINE
    6  Andrew Harper
    6  Mohamed Abdeen
    3  Luca Cappelletti
    2  Denys Tsomenko
    2  Simon Sawert
    1  Andrew Lamb
    1  Artem Osipov
    1  Chen Chongchen
    1  Dmitriy Mazurin
    1  Elia Perantoni
    1  Hendrik Makait
    1  Jacob Wujciak-Jens
    1  Simon Vandel Sillesen
    1  Yannick Utard
    1  benrsatori
    1  dependabot[bot]
    1  hulk
    1  vimko
```

Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
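As a quick illustration of the pretty-printing entries above (#1847, #1851, #1854, #1855), here is a minimal sketch using the crate's public `Parser::parse_sql` entry point; the SQL string and dialect choice are illustrative only:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT a, b FROM t JOIN u ON t.id = u.id WHERE a > 1";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).expect("valid SQL");

    // `{}` keeps the existing compact, round-trippable output.
    println!("{}", statements[0]);
    // `{:#}` is the alternate formatting added in 0.57.0, which spreads the
    // statement over multiple indented lines.
    println!("{:#}", statements[0]);
}
```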
dev/release/rat_exclude_files.txt

@@ -1,7 +1,8 @@
# Files to exclude from the Apache Rat (license) check
.gitignore
.tool-versions
target/*
**.gitignore
rat.txt
dev/release/rat_exclude_files.txt
fuzz/.gitignore
sqlparser_bench/img/flamegraph.svg

**Cargo.lock
filtered_rat.txt

examples/cli.rs

@@ -63,7 +63,7 @@ $ cargo run --example cli - [--dialectname]
    };

    let contents = if filename == "-" {
        println!("Parsing from stdin using {:?}", dialect);
        println!("Parsing from stdin using {dialect:?}");
        let mut buf = Vec::new();
        stdin()
            .read_to_end(&mut buf)

@@ -45,25 +45,24 @@ fn basic_queries(c: &mut Criterion) {

    let large_statement = {
        let expressions = (0..1000)
            .map(|n| format!("FN_{}(COL_{})", n, n))
            .map(|n| format!("FN_{n}(COL_{n})"))
            .collect::<Vec<_>>()
            .join(", ");
        let tables = (0..1000)
            .map(|n| format!("TABLE_{}", n))
            .map(|n| format!("TABLE_{n}"))
            .collect::<Vec<_>>()
            .join(" JOIN ");
        let where_condition = (0..1000)
            .map(|n| format!("COL_{} = {}", n, n))
            .map(|n| format!("COL_{n} = {n}"))
            .collect::<Vec<_>>()
            .join(" OR ");
        let order_condition = (0..1000)
            .map(|n| format!("COL_{} DESC", n))
            .map(|n| format!("COL_{n} DESC"))
            .collect::<Vec<_>>()
            .join(", ");

        format!(
            "SELECT {} FROM {} WHERE {} ORDER BY {}",
            expressions, tables, where_condition, order_condition
            "SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
        )
    };

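Most of the Rust hunks that follow are the same mechanical change as above, driven by the Rust 1.87 clippy cleanup noted in the changelog: positional `format!`/`write!`/`println!` arguments replaced by inline (captured) format arguments. A minimal before/after sketch (the variable name is illustrative):

```rust
fn main() {
    let dialect = "GenericDialect";

    // Old style, removed throughout this commit: positional argument.
    println!("Parsing from stdin using {:?}", dialect);

    // New style, introduced throughout this commit: the variable is captured
    // directly inside the format string. Output is identical.
    println!("Parsing from stdin using {dialect:?}");
}
```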
@ -446,6 +446,14 @@ pub enum DataType {
|
|||
///
|
||||
/// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html
|
||||
GeometricType(GeometricTypeKind),
|
||||
/// PostgreSQL text search vectors, see [PostgreSQL].
|
||||
///
|
||||
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
|
||||
TsVector,
|
||||
/// PostgreSQL text search query, see [PostgreSQL].
|
||||
///
|
||||
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
|
||||
TsQuery,
|
||||
}
|
||||
|
||||
impl fmt::Display for DataType {
|
||||
|
@ -658,7 +666,7 @@ impl fmt::Display for DataType {
|
|||
}
|
||||
DataType::Enum(vals, bits) => {
|
||||
match bits {
|
||||
Some(bits) => write!(f, "ENUM{}", bits),
|
||||
Some(bits) => write!(f, "ENUM{bits}"),
|
||||
None => write!(f, "ENUM"),
|
||||
}?;
|
||||
write!(f, "(")?;
|
||||
|
@ -706,16 +714,16 @@ impl fmt::Display for DataType {
|
|||
}
|
||||
// ClickHouse
|
||||
DataType::Nullable(data_type) => {
|
||||
write!(f, "Nullable({})", data_type)
|
||||
write!(f, "Nullable({data_type})")
|
||||
}
|
||||
DataType::FixedString(character_length) => {
|
||||
write!(f, "FixedString({})", character_length)
|
||||
write!(f, "FixedString({character_length})")
|
||||
}
|
||||
DataType::LowCardinality(data_type) => {
|
||||
write!(f, "LowCardinality({})", data_type)
|
||||
write!(f, "LowCardinality({data_type})")
|
||||
}
|
||||
DataType::Map(key_data_type, value_data_type) => {
|
||||
write!(f, "Map({}, {})", key_data_type, value_data_type)
|
||||
write!(f, "Map({key_data_type}, {value_data_type})")
|
||||
}
|
||||
DataType::Tuple(fields) => {
|
||||
write!(f, "Tuple({})", display_comma_separated(fields))
|
||||
|
@ -737,7 +745,9 @@ impl fmt::Display for DataType {
|
|||
DataType::NamedTable { name, columns } => {
|
||||
write!(f, "{} TABLE ({})", name, display_comma_separated(columns))
|
||||
}
|
||||
DataType::GeometricType(kind) => write!(f, "{}", kind),
|
||||
DataType::GeometricType(kind) => write!(f, "{kind}"),
|
||||
DataType::TsVector => write!(f, "TSVECTOR"),
|
||||
DataType::TsQuery => write!(f, "TSQUERY"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -932,7 +942,7 @@ impl fmt::Display for CharacterLength {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
CharacterLength::IntegerLength { length, unit } => {
|
||||
write!(f, "{}", length)?;
|
||||
write!(f, "{length}")?;
|
||||
if let Some(unit) = unit {
|
||||
write!(f, " {unit}")?;
|
||||
}
|
||||
|
@ -987,7 +997,7 @@ impl fmt::Display for BinaryLength {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
BinaryLength::IntegerLength { length } => {
|
||||
write!(f, "{}", length)?;
|
||||
write!(f, "{length}")?;
|
||||
}
|
||||
BinaryLength::Max => {
|
||||
write!(f, "MAX")?;
|
||||
|
|
|
@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation {
|
|||
in_database,
|
||||
} => {
|
||||
if let Some(database_name) = in_database {
|
||||
write!(f, "IN DATABASE {} ", database_name)?;
|
||||
write!(f, "IN DATABASE {database_name} ")?;
|
||||
}
|
||||
|
||||
match config_value {
|
||||
|
@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation {
|
|||
in_database,
|
||||
} => {
|
||||
if let Some(database_name) = in_database {
|
||||
write!(f, "IN DATABASE {} ", database_name)?;
|
||||
write!(f, "IN DATABASE {database_name} ")?;
|
||||
}
|
||||
|
||||
match config_name {
|
||||
|
@ -218,15 +218,15 @@ impl fmt::Display for Use {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str("USE ")?;
|
||||
match self {
|
||||
Use::Catalog(name) => write!(f, "CATALOG {}", name),
|
||||
Use::Schema(name) => write!(f, "SCHEMA {}", name),
|
||||
Use::Database(name) => write!(f, "DATABASE {}", name),
|
||||
Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name),
|
||||
Use::Role(name) => write!(f, "ROLE {}", name),
|
||||
Use::Catalog(name) => write!(f, "CATALOG {name}"),
|
||||
Use::Schema(name) => write!(f, "SCHEMA {name}"),
|
||||
Use::Database(name) => write!(f, "DATABASE {name}"),
|
||||
Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
|
||||
Use::Role(name) => write!(f, "ROLE {name}"),
|
||||
Use::SecondaryRoles(secondary_roles) => {
|
||||
write!(f, "SECONDARY ROLES {}", secondary_roles)
|
||||
write!(f, "SECONDARY ROLES {secondary_roles}")
|
||||
}
|
||||
Use::Object(name) => write!(f, "{}", name),
|
||||
Use::Object(name) => write!(f, "{name}"),
|
||||
Use::Default => write!(f, "DEFAULT"),
|
||||
}
|
||||
}
|
||||
|
|
src/ast/ddl.rs | 111
|
@ -30,11 +30,11 @@ use sqlparser_derive::{Visit, VisitMut};
|
|||
|
||||
use crate::ast::value::escape_single_quote_string;
|
||||
use crate::ast::{
|
||||
display_comma_separated, display_separated, CommentDef, CreateFunctionBody,
|
||||
display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody,
|
||||
CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull,
|
||||
FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName,
|
||||
OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value,
|
||||
ValueWithSpan,
|
||||
FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition,
|
||||
ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag,
|
||||
Value, ValueWithSpan,
|
||||
};
|
||||
use crate::keywords::Keyword;
|
||||
use crate::tokenizer::Token;
|
||||
|
@ -57,7 +57,7 @@ impl fmt::Display for ReplicaIdentity {
|
|||
ReplicaIdentity::None => f.write_str("NONE"),
|
||||
ReplicaIdentity::Full => f.write_str("FULL"),
|
||||
ReplicaIdentity::Default => f.write_str("DEFAULT"),
|
||||
ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {}", idx),
|
||||
ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -187,6 +187,12 @@ pub enum AlterTableOperation {
|
|||
DropForeignKey {
|
||||
name: Ident,
|
||||
},
|
||||
/// `DROP INDEX <index_name>`
|
||||
///
|
||||
/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html
|
||||
DropIndex {
|
||||
name: Ident,
|
||||
},
|
||||
/// `ENABLE ALWAYS RULE rewrite_rule_name`
|
||||
///
|
||||
/// Note: this is a PostgreSQL-specific operation.
|
||||
|
@ -444,7 +450,7 @@ pub enum Owner {
|
|||
impl fmt::Display for Owner {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Owner::Ident(ident) => write!(f, "{}", ident),
|
||||
Owner::Ident(ident) => write!(f, "{ident}"),
|
||||
Owner::CurrentRole => write!(f, "CURRENT_ROLE"),
|
||||
Owner::CurrentUser => write!(f, "CURRENT_USER"),
|
||||
Owner::SessionUser => write!(f, "SESSION_USER"),
|
||||
|
@ -519,7 +525,7 @@ impl fmt::Display for AlterTableOperation {
|
|||
if *if_not_exists {
|
||||
write!(f, " IF NOT EXISTS")?;
|
||||
}
|
||||
write!(f, " {} ({})", name, query)
|
||||
write!(f, " {name} ({query})")
|
||||
}
|
||||
AlterTableOperation::Algorithm { equals, algorithm } => {
|
||||
write!(
|
||||
|
@ -534,7 +540,7 @@ impl fmt::Display for AlterTableOperation {
|
|||
if *if_exists {
|
||||
write!(f, " IF EXISTS")?;
|
||||
}
|
||||
write!(f, " {}", name)
|
||||
write!(f, " {name}")
|
||||
}
|
||||
AlterTableOperation::MaterializeProjection {
|
||||
if_exists,
|
||||
|
@ -545,9 +551,9 @@ impl fmt::Display for AlterTableOperation {
|
|||
if *if_exists {
|
||||
write!(f, " IF EXISTS")?;
|
||||
}
|
||||
write!(f, " {}", name)?;
|
||||
write!(f, " {name}")?;
|
||||
if let Some(partition) = partition {
|
||||
write!(f, " IN PARTITION {}", partition)?;
|
||||
write!(f, " IN PARTITION {partition}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -560,9 +566,9 @@ impl fmt::Display for AlterTableOperation {
|
|||
if *if_exists {
|
||||
write!(f, " IF EXISTS")?;
|
||||
}
|
||||
write!(f, " {}", name)?;
|
||||
write!(f, " {name}")?;
|
||||
if let Some(partition) = partition {
|
||||
write!(f, " IN PARTITION {}", partition)?;
|
||||
write!(f, " IN PARTITION {partition}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -606,6 +612,7 @@ impl fmt::Display for AlterTableOperation {
|
|||
}
|
||||
AlterTableOperation::DropPrimaryKey => write!(f, "DROP PRIMARY KEY"),
|
||||
AlterTableOperation::DropForeignKey { name } => write!(f, "DROP FOREIGN KEY {name}"),
|
||||
AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"),
|
||||
AlterTableOperation::DropColumn {
|
||||
has_column_keyword,
|
||||
column_name,
|
||||
|
@ -972,7 +979,7 @@ pub enum TableConstraint {
|
|||
/// [1]: IndexType
|
||||
index_type: Option<IndexType>,
|
||||
/// Identifiers of the columns that are unique.
|
||||
columns: Vec<Ident>,
|
||||
columns: Vec<IndexColumn>,
|
||||
index_options: Vec<IndexOption>,
|
||||
characteristics: Option<ConstraintCharacteristics>,
|
||||
/// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]`
|
||||
|
@ -1008,7 +1015,7 @@ pub enum TableConstraint {
|
|||
/// [1]: IndexType
|
||||
index_type: Option<IndexType>,
|
||||
/// Identifiers of the columns that form the primary key.
|
||||
columns: Vec<Ident>,
|
||||
columns: Vec<IndexColumn>,
|
||||
index_options: Vec<IndexOption>,
|
||||
characteristics: Option<ConstraintCharacteristics>,
|
||||
},
|
||||
|
@ -1019,6 +1026,9 @@ pub enum TableConstraint {
|
|||
/// }`).
|
||||
ForeignKey {
|
||||
name: Option<Ident>,
|
||||
/// MySQL-specific field
|
||||
/// <https://dev.mysql.com/doc/refman/8.4/en/create-table-foreign-keys.html>
|
||||
index_name: Option<Ident>,
|
||||
columns: Vec<Ident>,
|
||||
foreign_table: ObjectName,
|
||||
referred_columns: Vec<Ident>,
|
||||
|
@ -1026,10 +1036,13 @@ pub enum TableConstraint {
|
|||
on_update: Option<ReferentialAction>,
|
||||
characteristics: Option<ConstraintCharacteristics>,
|
||||
},
|
||||
/// `[ CONSTRAINT <name> ] CHECK (<expr>)`
|
||||
/// `[ CONSTRAINT <name> ] CHECK (<expr>) [[NOT] ENFORCED]`
|
||||
Check {
|
||||
name: Option<Ident>,
|
||||
expr: Box<Expr>,
|
||||
/// MySQL-specific syntax
|
||||
/// <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
|
||||
enforced: Option<bool>,
|
||||
},
|
||||
/// MySQLs [index definition][1] for index creation. Not present on ANSI so, for now, the usage
|
||||
/// is restricted to MySQL, as no other dialects that support this syntax were found.
|
||||
|
@ -1047,7 +1060,7 @@ pub enum TableConstraint {
|
|||
/// [1]: IndexType
|
||||
index_type: Option<IndexType>,
|
||||
/// Referred column identifier list.
|
||||
columns: Vec<Ident>,
|
||||
columns: Vec<IndexColumn>,
|
||||
},
|
||||
/// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same,
|
||||
/// and MySQL displays both the same way, it is part of this definition as well.
|
||||
|
@ -1070,7 +1083,7 @@ pub enum TableConstraint {
|
|||
/// Optional index name.
|
||||
opt_index_name: Option<Ident>,
|
||||
/// Referred column identifier list.
|
||||
columns: Vec<Ident>,
|
||||
columns: Vec<IndexColumn>,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -1129,6 +1142,7 @@ impl fmt::Display for TableConstraint {
|
|||
}
|
||||
TableConstraint::ForeignKey {
|
||||
name,
|
||||
index_name,
|
||||
columns,
|
||||
foreign_table,
|
||||
referred_columns,
|
||||
|
@ -1138,8 +1152,9 @@ impl fmt::Display for TableConstraint {
|
|||
} => {
|
||||
write!(
|
||||
f,
|
||||
"{}FOREIGN KEY ({}) REFERENCES {}",
|
||||
"{}FOREIGN KEY{} ({}) REFERENCES {}",
|
||||
display_constraint_name(name),
|
||||
display_option_spaced(index_name),
|
||||
display_comma_separated(columns),
|
||||
foreign_table,
|
||||
)?;
|
||||
|
@ -1153,12 +1168,21 @@ impl fmt::Display for TableConstraint {
|
|||
write!(f, " ON UPDATE {action}")?;
|
||||
}
|
||||
if let Some(characteristics) = characteristics {
|
||||
write!(f, " {}", characteristics)?;
|
||||
write!(f, " {characteristics}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
TableConstraint::Check { name, expr } => {
|
||||
write!(f, "{}CHECK ({})", display_constraint_name(name), expr)
|
||||
TableConstraint::Check {
|
||||
name,
|
||||
expr,
|
||||
enforced,
|
||||
} => {
|
||||
write!(f, "{}CHECK ({})", display_constraint_name(name), expr)?;
|
||||
if let Some(b) = enforced {
|
||||
write!(f, " {}", if *b { "ENFORCED" } else { "NOT ENFORCED" })
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
TableConstraint::Index {
|
||||
display_as_key,
|
||||
|
@ -1284,7 +1308,7 @@ impl fmt::Display for IndexType {
|
|||
Self::SPGiST => write!(f, "SPGIST"),
|
||||
Self::BRIN => write!(f, "BRIN"),
|
||||
Self::Bloom => write!(f, "BLOOM"),
|
||||
Self::Custom(name) => write!(f, "{}", name),
|
||||
Self::Custom(name) => write!(f, "{name}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1343,11 +1367,16 @@ impl fmt::Display for NullsDistinctOption {
|
|||
pub struct ProcedureParam {
|
||||
pub name: Ident,
|
||||
pub data_type: DataType,
|
||||
pub mode: Option<ArgMode>,
|
||||
}
|
||||
|
||||
impl fmt::Display for ProcedureParam {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{} {}", self.name, self.data_type)
|
||||
if let Some(mode) = &self.mode {
|
||||
write!(f, "{mode} {} {}", self.name, self.data_type)
|
||||
} else {
|
||||
write!(f, "{} {}", self.name, self.data_type)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1397,17 +1426,41 @@ impl fmt::Display for ColumnDef {
|
|||
pub struct ViewColumnDef {
|
||||
pub name: Ident,
|
||||
pub data_type: Option<DataType>,
|
||||
pub options: Option<Vec<ColumnOption>>,
|
||||
pub options: Option<ColumnOptions>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum ColumnOptions {
|
||||
CommaSeparated(Vec<ColumnOption>),
|
||||
SpaceSeparated(Vec<ColumnOption>),
|
||||
}
|
||||
|
||||
impl ColumnOptions {
|
||||
pub fn as_slice(&self) -> &[ColumnOption] {
|
||||
match self {
|
||||
ColumnOptions::CommaSeparated(options) => options.as_slice(),
|
||||
ColumnOptions::SpaceSeparated(options) => options.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ViewColumnDef {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
if let Some(data_type) = self.data_type.as_ref() {
|
||||
write!(f, " {}", data_type)?;
|
||||
write!(f, " {data_type}")?;
|
||||
}
|
||||
if let Some(options) = self.options.as_ref() {
|
||||
write!(f, " {}", display_comma_separated(options.as_slice()))?;
|
||||
match options {
|
||||
ColumnOptions::CommaSeparated(column_options) => {
|
||||
write!(f, " {}", display_comma_separated(column_options.as_slice()))?;
|
||||
}
|
||||
ColumnOptions::SpaceSeparated(column_options) => {
|
||||
write!(f, " {}", display_separated(column_options.as_slice(), " "))?
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1792,7 +1845,7 @@ impl fmt::Display for ColumnOption {
|
|||
} => {
|
||||
write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?;
|
||||
if let Some(characteristics) = characteristics {
|
||||
write!(f, " {}", characteristics)?;
|
||||
write!(f, " {characteristics}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1814,7 +1867,7 @@ impl fmt::Display for ColumnOption {
|
|||
write!(f, " ON UPDATE {action}")?;
|
||||
}
|
||||
if let Some(characteristics) = characteristics {
|
||||
write!(f, " {}", characteristics)?;
|
||||
write!(f, " {characteristics}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1874,7 +1927,7 @@ impl fmt::Display for ColumnOption {
|
|||
write!(f, "{parameters}")
|
||||
}
|
||||
OnConflict(keyword) => {
|
||||
write!(f, "ON CONFLICT {:?}", keyword)?;
|
||||
write!(f, "ON CONFLICT {keyword:?}")?;
|
||||
Ok(())
|
||||
}
|
||||
Policy(parameters) => {
|
||||
|
|
|
@ -55,7 +55,7 @@ impl Display for IndexColumn {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.column)?;
|
||||
if let Some(operator_class) = &self.operator_class {
|
||||
write!(f, " {}", operator_class)?;
|
||||
write!(f, " {operator_class}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -175,7 +175,9 @@ pub struct CreateTable {
|
|||
pub partition_by: Option<Box<Expr>>,
|
||||
/// BigQuery: Table clustering column list.
|
||||
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list>
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
|
||||
/// Snowflake: Table clustering list which contains base column, expressions on base columns.
|
||||
/// <https://docs.snowflake.com/en/user-guide/tables-clustering-keys#defining-a-clustering-key-for-a-table>
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
|
||||
/// Hive: Table clustering column list.
|
||||
/// <https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable>
|
||||
pub clustered_by: Option<ClusteredBy>,
|
||||
|
@ -264,7 +266,7 @@ impl Display for CreateTable {
|
|||
name = self.name,
|
||||
)?;
|
||||
if let Some(on_cluster) = &self.on_cluster {
|
||||
write!(f, " ON CLUSTER {}", on_cluster)?;
|
||||
write!(f, " ON CLUSTER {on_cluster}")?;
|
||||
}
|
||||
if !self.columns.is_empty() || !self.constraints.is_empty() {
|
||||
f.write_str(" (")?;
|
||||
|
@ -381,15 +383,15 @@ impl Display for CreateTable {
|
|||
match &self.table_options {
|
||||
options @ CreateTableOptions::With(_)
|
||||
| options @ CreateTableOptions::Plain(_)
|
||||
| options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?,
|
||||
| options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
|
||||
_ => (),
|
||||
}
|
||||
|
||||
if let Some(primary_key) = &self.primary_key {
|
||||
write!(f, " PRIMARY KEY {}", primary_key)?;
|
||||
write!(f, " PRIMARY KEY {primary_key}")?;
|
||||
}
|
||||
if let Some(order_by) = &self.order_by {
|
||||
write!(f, " ORDER BY {}", order_by)?;
|
||||
write!(f, " ORDER BY {order_by}")?;
|
||||
}
|
||||
if let Some(inherits) = &self.inherits {
|
||||
write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
|
||||
|
@ -401,7 +403,7 @@ impl Display for CreateTable {
|
|||
write!(f, " CLUSTER BY {cluster_by}")?;
|
||||
}
|
||||
if let options @ CreateTableOptions::Options(_) = &self.table_options {
|
||||
write!(f, " {}", options)?;
|
||||
write!(f, " {options}")?;
|
||||
}
|
||||
if let Some(external_volume) = self.external_volume.as_ref() {
|
||||
write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;
|
||||
|
|
|
@ -67,7 +67,7 @@ impl fmt::Display for KeyValueOptions {
|
|||
} else {
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
write!(f, "{}", option)?;
|
||||
write!(f, "{option}")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
|
|
@ -90,7 +90,7 @@ pub struct CreateTableBuilder {
|
|||
pub primary_key: Option<Box<Expr>>,
|
||||
pub order_by: Option<OneOrManyWithParens<Expr>>,
|
||||
pub partition_by: Option<Box<Expr>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
|
||||
pub clustered_by: Option<ClusteredBy>,
|
||||
pub inherits: Option<Vec<ObjectName>>,
|
||||
pub strict: bool,
|
||||
|
@ -279,7 +279,7 @@ impl CreateTableBuilder {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Ident>>>) -> Self {
|
||||
pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Expr>>>) -> Self {
|
||||
self.cluster_by = cluster_by;
|
||||
self
|
||||
}
|
||||
|
@ -542,7 +542,7 @@ impl TryFrom<Statement> for CreateTableBuilder {
|
|||
#[derive(Default)]
|
||||
pub(crate) struct CreateTableConfiguration {
|
||||
pub partition_by: Option<Box<Expr>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
|
||||
pub inherits: Option<Vec<ObjectName>>,
|
||||
pub table_options: CreateTableOptions,
|
||||
}
|
||||
|
|
src/ast/mod.rs | 297
|
@ -28,6 +28,7 @@ use helpers::{
|
|||
stmt_data_loading::{FileStagingCommand, StageLoadSelectItemKind},
|
||||
};
|
||||
|
||||
use core::cmp::Ordering;
|
||||
use core::ops::Deref;
|
||||
use core::{
|
||||
fmt::{self, Display},
|
||||
|
@ -60,13 +61,14 @@ pub use self::ddl::{
|
|||
AlterColumnOperation, AlterConnectorOwner, AlterIndexOperation, AlterPolicyOperation,
|
||||
AlterTableAlgorithm, AlterTableLock, AlterTableOperation, AlterType, AlterTypeAddValue,
|
||||
AlterTypeAddValuePosition, AlterTypeOperation, AlterTypeRename, AlterTypeRenameValue,
|
||||
ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnPolicy, ColumnPolicyProperty,
|
||||
ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction, Deduplicate,
|
||||
DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode, IdentityParameters,
|
||||
IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder,
|
||||
IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner, Partition,
|
||||
ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint, TagsColumnOption,
|
||||
UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, ViewColumnDef,
|
||||
ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnOptions, ColumnPolicy,
|
||||
ColumnPolicyProperty, ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction,
|
||||
Deduplicate, DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode,
|
||||
IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
|
||||
IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner,
|
||||
Partition, ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint,
|
||||
TagsColumnOption, UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation,
|
||||
ViewColumnDef,
|
||||
};
|
||||
pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert};
|
||||
pub use self::operator::{BinaryOperator, UnaryOperator};
|
||||
|
@ -172,7 +174,7 @@ fn format_statement_list(f: &mut fmt::Formatter, statements: &[Statement]) -> fm
|
|||
}
|
||||
|
||||
/// An identifier, decomposed into its value or character data and the quote style.
|
||||
#[derive(Debug, Clone, PartialOrd, Ord)]
|
||||
#[derive(Debug, Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct Ident {
|
||||
|
@ -214,6 +216,35 @@ impl core::hash::Hash for Ident {
|
|||
|
||||
impl Eq for Ident {}
|
||||
|
||||
impl PartialOrd for Ident {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Ident {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
let Ident {
|
||||
value,
|
||||
quote_style,
|
||||
// exhaustiveness check; we ignore spans in ordering
|
||||
span: _,
|
||||
} = self;
|
||||
|
||||
let Ident {
|
||||
value: other_value,
|
||||
quote_style: other_quote_style,
|
||||
// exhaustiveness check; we ignore spans in ordering
|
||||
span: _,
|
||||
} = other;
|
||||
|
||||
// First compare by value, then by quote_style
|
||||
value
|
||||
.cmp(other_value)
|
||||
.then_with(|| quote_style.cmp(other_quote_style))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
/// Create a new identifier with the given value and no quotes and an empty span.
|
||||
pub fn new<S>(value: S) -> Self
|
||||
|
@ -326,7 +357,7 @@ impl ObjectNamePart {
|
|||
impl fmt::Display for ObjectNamePart {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
ObjectNamePart::Identifier(ident) => write!(f, "{}", ident),
|
||||
ObjectNamePart::Identifier(ident) => write!(f, "{ident}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -428,14 +459,22 @@ impl fmt::Display for Interval {
|
|||
pub struct StructField {
|
||||
pub field_name: Option<Ident>,
|
||||
pub field_type: DataType,
|
||||
/// Struct field options.
|
||||
/// See [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#column_name_and_column_schema)
|
||||
pub options: Option<Vec<SqlOption>>,
|
||||
}
|
||||
|
||||
impl fmt::Display for StructField {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if let Some(name) = &self.field_name {
|
||||
write!(f, "{name} {}", self.field_type)
|
||||
write!(f, "{name} {}", self.field_type)?;
|
||||
} else {
|
||||
write!(f, "{}", self.field_type)
|
||||
write!(f, "{}", self.field_type)?;
|
||||
}
|
||||
if let Some(options) = &self.options {
|
||||
write!(f, " OPTIONS({})", display_separated(options, ", "))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -740,7 +779,7 @@ pub enum Expr {
|
|||
/// `[ NOT ] IN (SELECT ...)`
|
||||
InSubquery {
|
||||
expr: Box<Expr>,
|
||||
subquery: Box<SetExpr>,
|
||||
subquery: Box<Query>,
|
||||
negated: bool,
|
||||
},
|
||||
/// `[ NOT ] IN UNNEST(array_expression)`
|
||||
|
@ -967,6 +1006,8 @@ pub enum Expr {
|
|||
/// not `< 0` nor `1, 2, 3` as allowed in a `<simple when clause>` per
|
||||
/// <https://jakewheat.github.io/sql-overview/sql-2011-foundation-grammar.html#simple-when-clause>
|
||||
Case {
|
||||
case_token: AttachedToken,
|
||||
end_token: AttachedToken,
|
||||
operand: Option<Box<Expr>>,
|
||||
conditions: Vec<CaseWhen>,
|
||||
else_result: Option<Box<Expr>>,
|
||||
|
@ -1169,8 +1210,8 @@ pub enum AccessExpr {
|
|||
impl fmt::Display for AccessExpr {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
AccessExpr::Dot(expr) => write!(f, ".{}", expr),
|
||||
AccessExpr::Subscript(subscript) => write!(f, "[{}]", subscript),
|
||||
AccessExpr::Dot(expr) => write!(f, ".{expr}"),
|
||||
AccessExpr::Subscript(subscript) => write!(f, "[{subscript}]"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1372,12 +1413,12 @@ impl fmt::Display for Expr {
|
|||
match self {
|
||||
Expr::Identifier(s) => write!(f, "{s}"),
|
||||
Expr::Wildcard(_) => f.write_str("*"),
|
||||
Expr::QualifiedWildcard(prefix, _) => write!(f, "{}.*", prefix),
|
||||
Expr::QualifiedWildcard(prefix, _) => write!(f, "{prefix}.*"),
|
||||
Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")),
|
||||
Expr::CompoundFieldAccess { root, access_chain } => {
|
||||
write!(f, "{}", root)?;
|
||||
write!(f, "{root}")?;
|
||||
for field in access_chain {
|
||||
write!(f, "{}", field)?;
|
||||
write!(f, "{field}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1506,7 +1547,7 @@ impl fmt::Display for Expr {
|
|||
} => {
|
||||
let not_ = if *negated { "NOT " } else { "" };
|
||||
if form.is_none() {
|
||||
write!(f, "{} IS {}NORMALIZED", expr, not_)
|
||||
write!(f, "{expr} IS {not_}NORMALIZED")
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
|
@ -1675,6 +1716,8 @@ impl fmt::Display for Expr {
|
|||
}
|
||||
Expr::Function(fun) => fun.fmt(f),
|
||||
Expr::Case {
|
||||
case_token: _,
|
||||
end_token: _,
|
||||
operand,
|
||||
conditions,
|
||||
else_result,
|
||||
|
@ -1826,7 +1869,7 @@ impl fmt::Display for Expr {
|
|||
}
|
||||
}
|
||||
Expr::Named { expr, name } => {
|
||||
write!(f, "{} AS {}", expr, name)
|
||||
write!(f, "{expr} AS {name}")
|
||||
}
|
||||
Expr::Dictionary(fields) => {
|
||||
write!(f, "{{{}}}", display_comma_separated(fields))
|
||||
|
@ -2382,7 +2425,7 @@ impl fmt::Display for ConditionalStatements {
|
|||
}
|
||||
Ok(())
|
||||
}
|
||||
ConditionalStatements::BeginEnd(bes) => write!(f, "{}", bes),
|
||||
ConditionalStatements::BeginEnd(bes) => write!(f, "{bes}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2902,9 +2945,7 @@ impl Display for Set {
|
|||
write!(
|
||||
f,
|
||||
"SET {modifier}ROLE {role_name}",
|
||||
modifier = context_modifier
|
||||
.map(|m| format!("{}", m))
|
||||
.unwrap_or_default()
|
||||
modifier = context_modifier.map(|m| format!("{m}")).unwrap_or_default()
|
||||
)
|
||||
}
|
||||
Self::SetSessionParam(kind) => write!(f, "SET {kind}"),
|
||||
|
@ -2937,7 +2978,7 @@ impl Display for Set {
|
|||
charset_name,
|
||||
collation_name,
|
||||
} => {
|
||||
write!(f, "SET NAMES {}", charset_name)?;
|
||||
write!(f, "SET NAMES {charset_name}")?;
|
||||
|
||||
if let Some(collation) = collation_name {
|
||||
f.write_str(" COLLATE ")?;
|
||||
|
@ -2960,7 +3001,7 @@ impl Display for Set {
|
|||
write!(
|
||||
f,
|
||||
"SET {}{}{} = {}",
|
||||
scope.map(|s| format!("{}", s)).unwrap_or_default(),
|
||||
scope.map(|s| format!("{s}")).unwrap_or_default(),
|
||||
if *hivevar { "HIVEVAR:" } else { "" },
|
||||
variable,
|
||||
display_comma_separated(values)
|
||||
|
@ -2978,6 +3019,36 @@ impl From<Set> for Statement {
|
|||
}
|
||||
}
|
||||
|
||||
/// A representation of a `WHEN` arm with all the identifiers caught and the statements to execute
|
||||
/// for the arm.
|
||||
///
|
||||
/// Snowflake: <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
|
||||
/// BigQuery: <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct ExceptionWhen {
|
||||
pub idents: Vec<Ident>,
|
||||
pub statements: Vec<Statement>,
|
||||
}
|
||||
|
||||
impl Display for ExceptionWhen {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"WHEN {idents} THEN",
|
||||
idents = display_separated(&self.idents, " OR ")
|
||||
)?;
|
||||
|
||||
if !self.statements.is_empty() {
|
||||
write!(f, " ")?;
|
||||
format_statement_list(f, &self.statements)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A top-level statement (SELECT, INSERT, CREATE, etc.)
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
|
@ -3014,9 +3085,6 @@ pub enum Statement {
|
|||
/// TABLE - optional keyword;
|
||||
table: bool,
|
||||
/// Postgres-specific option
|
||||
/// [ TRUNCATE TABLE ONLY ]
|
||||
only: bool,
|
||||
/// Postgres-specific option
|
||||
/// [ RESTART IDENTITY | CONTINUE IDENTITY ]
|
||||
identity: Option<TruncateIdentityOption>,
|
||||
/// Postgres-specific option
|
||||
|
@ -3669,17 +3737,20 @@ pub enum Statement {
|
|||
/// END;
|
||||
/// ```
|
||||
statements: Vec<Statement>,
|
||||
/// Statements of an exception clause.
|
||||
/// Exception handling with exception clauses.
|
||||
/// Example:
|
||||
/// ```sql
|
||||
/// BEGIN
|
||||
/// SELECT 1;
|
||||
/// EXCEPTION WHEN ERROR THEN
|
||||
/// SELECT 2;
|
||||
/// SELECT 3;
|
||||
/// END;
|
||||
/// EXCEPTION
|
||||
/// WHEN EXCEPTION_1 THEN
|
||||
/// SELECT 2;
|
||||
/// WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
|
||||
/// SELECT 3;
|
||||
/// WHEN OTHER THEN
|
||||
/// SELECT 4;
|
||||
/// ```
|
||||
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
|
||||
exception_statements: Option<Vec<Statement>>,
|
||||
/// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
|
||||
exception: Option<Vec<ExceptionWhen>>,
|
||||
/// TRUE if the statement has an `END` keyword.
|
||||
has_end_keyword: bool,
|
||||
},
|
||||
|
@ -3724,6 +3795,14 @@ pub enum Statement {
|
|||
/// `<schema name> | AUTHORIZATION <schema authorization identifier> | <schema name> AUTHORIZATION <schema authorization identifier>`
|
||||
schema_name: SchemaName,
|
||||
if_not_exists: bool,
|
||||
/// Schema properties.
|
||||
///
|
||||
/// ```sql
|
||||
/// CREATE SCHEMA myschema WITH (key1='value1');
|
||||
/// ```
|
||||
///
|
||||
/// [Trino](https://trino.io/docs/current/sql/create-schema.html)
|
||||
with: Option<Vec<SqlOption>>,
|
||||
/// Schema options.
|
||||
///
|
||||
/// ```sql
|
||||
|
@ -3864,6 +3943,7 @@ pub enum Statement {
|
|||
or_alter: bool,
|
||||
name: ObjectName,
|
||||
params: Option<Vec<ProcedureParam>>,
|
||||
language: Option<Ident>,
|
||||
body: ConditionalStatements,
|
||||
},
|
||||
/// ```sql
|
||||
|
@ -4164,7 +4244,7 @@ pub enum Statement {
|
|||
/// ```sql
|
||||
/// NOTIFY channel [ , payload ]
|
||||
/// ```
|
||||
/// send a notification event together with an optional “payload” string to channel
|
||||
/// send a notification event together with an optional "payload" string to channel
|
||||
///
|
||||
/// See Postgres <https://www.postgresql.org/docs/current/sql-notify.html>
|
||||
NOTIFY {
|
||||
|
@ -4323,7 +4403,7 @@ impl fmt::Display for Statement {
|
|||
write!(f, "{describe_alias} ")?;
|
||||
|
||||
if let Some(format) = hive_format {
|
||||
write!(f, "{} ", format)?;
|
||||
write!(f, "{format} ")?;
|
||||
}
|
||||
if *has_table_keyword {
|
||||
write!(f, "TABLE ")?;
|
||||
|
@ -4425,17 +4505,15 @@ impl fmt::Display for Statement {
|
|||
table_names,
|
||||
partitions,
|
||||
table,
|
||||
only,
|
||||
identity,
|
||||
cascade,
|
||||
on_cluster,
|
||||
} => {
|
||||
let table = if *table { "TABLE " } else { "" };
|
||||
let only = if *only { "ONLY " } else { "" };
|
||||
|
||||
write!(
|
||||
f,
|
||||
"TRUNCATE {table}{only}{table_names}",
|
||||
"TRUNCATE {table}{table_names}",
|
||||
table_names = display_comma_separated(table_names)
|
||||
)?;
|
||||
|
||||
|
@ -4769,6 +4847,7 @@ impl fmt::Display for Statement {
|
|||
name,
|
||||
or_alter,
|
||||
params,
|
||||
language,
|
||||
body,
|
||||
} => {
|
||||
write!(
|
||||
|
@ -4784,6 +4863,10 @@ impl fmt::Display for Statement {
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(language) = language {
|
||||
write!(f, " LANGUAGE {language}")?;
|
||||
}
|
||||
|
||||
write!(f, " AS {body}")
|
||||
}
|
||||
Statement::CreateMacro {
|
||||
|
@ -5156,7 +5239,7 @@ impl fmt::Display for Statement {
|
|||
if *only {
|
||||
write!(f, "ONLY ")?;
|
||||
}
|
||||
write!(f, "{name} ", name = name)?;
|
||||
write!(f, "{name} ")?;
|
||||
if let Some(cluster) = on_cluster {
|
||||
write!(f, "ON CLUSTER {cluster} ")?;
|
||||
}
|
||||
|
@ -5234,7 +5317,7 @@ impl fmt::Display for Statement {
|
|||
)?;
|
||||
if !session_params.options.is_empty() {
|
||||
if *set {
|
||||
write!(f, " {}", session_params)?;
|
||||
write!(f, " {session_params}")?;
|
||||
} else {
|
||||
let options = session_params
|
||||
.options
|
||||
|
@ -5268,7 +5351,7 @@ impl fmt::Display for Statement {
|
|||
if *purge { " PURGE" } else { "" },
|
||||
)?;
|
||||
if let Some(table_name) = table.as_ref() {
|
||||
write!(f, " ON {}", table_name)?;
|
||||
write!(f, " ON {table_name}")?;
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
@ -5518,12 +5601,12 @@ impl fmt::Display for Statement {
|
|||
transaction,
|
||||
modifier,
|
||||
statements,
|
||||
exception_statements,
|
||||
exception,
|
||||
has_end_keyword,
|
||||
} => {
|
||||
if *syntax_begin {
|
||||
if let Some(modifier) = *modifier {
|
||||
write!(f, "BEGIN {}", modifier)?;
|
||||
write!(f, "BEGIN {modifier}")?;
|
||||
} else {
|
||||
write!(f, "BEGIN")?;
|
||||
}
|
||||
|
@ -5540,11 +5623,10 @@ impl fmt::Display for Statement {
|
|||
write!(f, " ")?;
|
||||
format_statement_list(f, statements)?;
|
||||
}
|
||||
if let Some(exception_statements) = exception_statements {
|
||||
write!(f, " EXCEPTION WHEN ERROR THEN")?;
|
||||
if !exception_statements.is_empty() {
|
||||
write!(f, " ")?;
|
||||
format_statement_list(f, exception_statements)?;
|
||||
if let Some(exception_when) = exception {
|
||||
write!(f, " EXCEPTION")?;
|
||||
for when in exception_when {
|
||||
write!(f, " {when}")?;
|
||||
}
|
||||
}
|
||||
if *has_end_keyword {
|
||||
|
@ -5560,7 +5642,7 @@ impl fmt::Display for Statement {
|
|||
if *end_syntax {
|
||||
write!(f, "END")?;
|
||||
if let Some(modifier) = *modifier {
|
||||
write!(f, " {}", modifier)?;
|
||||
write!(f, " {modifier}")?;
|
||||
}
|
||||
if *chain {
|
||||
write!(f, " AND CHAIN")?;
|
||||
|
@ -5586,6 +5668,7 @@ impl fmt::Display for Statement {
|
|||
Statement::CreateSchema {
|
||||
schema_name,
|
||||
if_not_exists,
|
||||
with,
|
||||
options,
|
||||
default_collate_spec,
|
||||
} => {
|
||||
|
@ -5600,6 +5683,10 @@ impl fmt::Display for Statement {
|
|||
write!(f, " DEFAULT COLLATE {collate}")?;
|
||||
}
|
||||
|
||||
if let Some(with) = with {
|
||||
write!(f, " WITH ({})", display_comma_separated(with))?;
|
||||
}
|
||||
|
||||
if let Some(options) = options {
|
||||
write!(f, " OPTIONS({})", display_comma_separated(options))?;
|
||||
}
|
||||
|
@ -5654,7 +5741,7 @@ impl fmt::Display for Statement {
|
|||
write!(f, " GRANTED BY {grantor}")?;
|
||||
}
|
||||
if let Some(cascade) = cascade {
|
||||
write!(f, " {}", cascade)?;
|
||||
write!(f, " {cascade}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -5833,13 +5920,13 @@ impl fmt::Display for Statement {
|
|||
if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" },
|
||||
)?;
|
||||
if !directory_table_params.options.is_empty() {
|
||||
write!(f, " DIRECTORY=({})", directory_table_params)?;
|
||||
write!(f, " DIRECTORY=({directory_table_params})")?;
|
||||
}
|
||||
if !file_format.options.is_empty() {
|
||||
write!(f, " FILE_FORMAT=({})", file_format)?;
|
||||
write!(f, " FILE_FORMAT=({file_format})")?;
|
||||
}
|
||||
if !copy_options.options.is_empty() {
|
||||
write!(f, " COPY_OPTIONS=({})", copy_options)?;
|
||||
write!(f, " COPY_OPTIONS=({copy_options})")?;
|
||||
}
|
||||
if comment.is_some() {
|
||||
write!(f, " COMMENT='{}'", comment.as_ref().unwrap())?;
|
||||
|
@ -5862,7 +5949,7 @@ impl fmt::Display for Statement {
|
|||
validation_mode,
|
||||
partition,
|
||||
} => {
|
||||
write!(f, "COPY INTO {}", into)?;
|
||||
write!(f, "COPY INTO {into}")?;
|
||||
if let Some(into_columns) = into_columns {
|
||||
write!(f, " ({})", display_comma_separated(into_columns))?;
|
||||
}
|
||||
|
@ -5878,12 +5965,12 @@ impl fmt::Display for Statement {
|
|||
)?;
|
||||
}
|
||||
if let Some(from_obj_alias) = from_obj_alias {
|
||||
write!(f, " AS {}", from_obj_alias)?;
|
||||
write!(f, " AS {from_obj_alias}")?;
|
||||
}
|
||||
write!(f, ")")?;
|
||||
} else if let Some(from_obj) = from_obj {
|
||||
// Standard data load
|
||||
write!(f, " FROM {}{}", from_obj, stage_params)?;
|
||||
write!(f, " FROM {from_obj}{stage_params}")?;
|
||||
if let Some(from_obj_alias) = from_obj_alias {
|
||||
write!(f, " AS {from_obj_alias}")?;
|
||||
}
|
||||
|
@ -5896,24 +5983,24 @@ impl fmt::Display for Statement {
|
|||
write!(f, " FILES = ('{}')", display_separated(files, "', '"))?;
|
||||
}
|
||||
if let Some(pattern) = pattern {
|
||||
write!(f, " PATTERN = '{}'", pattern)?;
|
||||
write!(f, " PATTERN = '{pattern}'")?;
|
||||
}
|
||||
if let Some(partition) = partition {
|
||||
write!(f, " PARTITION BY {partition}")?;
|
||||
}
|
||||
if !file_format.options.is_empty() {
|
||||
write!(f, " FILE_FORMAT=({})", file_format)?;
|
||||
write!(f, " FILE_FORMAT=({file_format})")?;
|
||||
}
|
||||
if !copy_options.options.is_empty() {
|
||||
match kind {
|
||||
CopyIntoSnowflakeKind::Table => {
|
||||
write!(f, " COPY_OPTIONS=({})", copy_options)?
|
||||
write!(f, " COPY_OPTIONS=({copy_options})")?
|
||||
}
|
||||
CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?,
|
||||
}
|
||||
}
|
||||
if let Some(validation_mode) = validation_mode {
|
||||
write!(f, " VALIDATION_MODE = {}", validation_mode)?;
|
||||
write!(f, " VALIDATION_MODE = {validation_mode}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -5959,10 +6046,10 @@ impl fmt::Display for Statement {
|
|||
} => {
|
||||
write!(f, "OPTIMIZE TABLE {name}")?;
|
||||
if let Some(on_cluster) = on_cluster {
|
||||
write!(f, " ON CLUSTER {on_cluster}", on_cluster = on_cluster)?;
|
||||
write!(f, " ON CLUSTER {on_cluster}")?;
|
||||
}
|
||||
if let Some(partition) = partition {
|
||||
write!(f, " {partition}", partition = partition)?;
|
||||
write!(f, " {partition}")?;
|
||||
}
|
||||
if *include_final {
|
||||
write!(f, " FINAL")?;
|
||||
|
@ -6089,7 +6176,7 @@ impl fmt::Display for SetAssignment {
|
|||
write!(
|
||||
f,
|
||||
"{}{} = {}",
|
||||
self.scope.map(|s| format!("{}", s)).unwrap_or_default(),
|
||||
self.scope.map(|s| format!("{s}")).unwrap_or_default(),
|
||||
self.name,
|
||||
self.value
|
||||
)
|
||||
|
@ -6106,10 +6193,17 @@ pub struct TruncateTableTarget {
|
|||
/// name of the table being truncated
|
||||
#[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
|
||||
pub name: ObjectName,
|
||||
/// Postgres-specific option
|
||||
/// [ TRUNCATE TABLE ONLY ]
|
||||
/// <https://www.postgresql.org/docs/current/sql-truncate.html>
|
||||
pub only: bool,
|
||||
}
|
||||
|
||||
impl fmt::Display for TruncateTableTarget {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.only {
|
||||
write!(f, "ONLY ")?;
|
||||
};
|
||||
write!(f, "{}", self.name)
|
||||
}
|
||||
}
|
||||
|
@ -6811,7 +6905,7 @@ impl fmt::Display for GranteeName {
|
|||
match self {
|
||||
GranteeName::ObjectName(name) => name.fmt(f),
|
||||
GranteeName::UserHost { user, host } => {
|
||||
write!(f, "{}@{}", user, host)
|
||||
write!(f, "{user}@{host}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6826,6 +6920,12 @@ pub enum GrantObjects {
|
|||
AllSequencesInSchema { schemas: Vec<ObjectName> },
|
||||
/// Grant privileges on `ALL TABLES IN SCHEMA <schema_name> [, ...]`
|
||||
AllTablesInSchema { schemas: Vec<ObjectName> },
|
||||
/// Grant privileges on `FUTURE SCHEMAS IN DATABASE <database_name> [, ...]`
|
||||
FutureSchemasInDatabase { databases: Vec<ObjectName> },
|
||||
/// Grant privileges on `FUTURE TABLES IN SCHEMA <schema_name> [, ...]`
|
||||
FutureTablesInSchema { schemas: Vec<ObjectName> },
|
||||
/// Grant privileges on `FUTURE VIEWS IN SCHEMA <schema_name> [, ...]`
|
||||
FutureViewsInSchema { schemas: Vec<ObjectName> },
|
||||
/// Grant privileges on specific databases
|
||||
Databases(Vec<ObjectName>),
|
||||
/// Grant privileges on specific schemas
|
||||
|
@ -6894,6 +6994,27 @@ impl fmt::Display for GrantObjects {
|
|||
display_comma_separated(schemas)
|
||||
)
|
||||
}
|
||||
GrantObjects::FutureSchemasInDatabase { databases } => {
|
||||
write!(
|
||||
f,
|
||||
"FUTURE SCHEMAS IN DATABASE {}",
|
||||
display_comma_separated(databases)
|
||||
)
|
||||
}
|
||||
GrantObjects::FutureTablesInSchema { schemas } => {
|
||||
write!(
|
||||
f,
|
||||
"FUTURE TABLES IN SCHEMA {}",
|
||||
display_comma_separated(schemas)
|
||||
)
|
||||
}
|
||||
GrantObjects::FutureViewsInSchema { schemas } => {
|
||||
write!(
|
||||
f,
|
||||
"FUTURE VIEWS IN SCHEMA {}",
|
||||
display_comma_separated(schemas)
|
||||
)
|
||||
}
|
||||
GrantObjects::ResourceMonitors(objects) => {
|
||||
write!(f, "RESOURCE MONITOR {}", display_comma_separated(objects))
|
||||
}
|
||||
|
@ -6981,7 +7102,7 @@ pub enum AssignmentTarget {
|
|||
impl fmt::Display for AssignmentTarget {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
AssignmentTarget::ColumnName(column) => write!(f, "{}", column),
|
||||
AssignmentTarget::ColumnName(column) => write!(f, "{column}"),
|
||||
AssignmentTarget::Tuple(columns) => write!(f, "({})", display_comma_separated(columns)),
|
||||
}
|
||||
}
|
||||
|
@ -7226,8 +7347,8 @@ impl fmt::Display for FunctionArguments {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
FunctionArguments::None => Ok(()),
|
||||
FunctionArguments::Subquery(query) => write!(f, "({})", query),
|
||||
FunctionArguments::List(args) => write!(f, "({})", args),
|
||||
FunctionArguments::Subquery(query) => write!(f, "({query})"),
|
||||
FunctionArguments::List(args) => write!(f, "({args})"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -7248,7 +7369,7 @@ pub struct FunctionArgumentList {
|
|||
impl fmt::Display for FunctionArgumentList {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if let Some(duplicate_treatment) = self.duplicate_treatment {
|
||||
write!(f, "{} ", duplicate_treatment)?;
|
||||
write!(f, "{duplicate_treatment} ")?;
|
||||
}
|
||||
write!(f, "{}", display_comma_separated(&self.args))?;
|
||||
if !self.clauses.is_empty() {
|
||||
|
@ -7308,7 +7429,7 @@ impl fmt::Display for FunctionArgumentClause {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment) => {
|
||||
write!(f, "{}", null_treatment)
|
||||
write!(f, "{null_treatment}")
|
||||
}
|
||||
FunctionArgumentClause::OrderBy(order_by) => {
|
||||
write!(f, "ORDER BY {}", display_comma_separated(order_by))
|
||||
|
@ -7764,12 +7885,12 @@ pub enum SqlOption {
|
|||
impl fmt::Display for SqlOption {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
SqlOption::Clustered(c) => write!(f, "{}", c),
|
||||
SqlOption::Clustered(c) => write!(f, "{c}"),
|
||||
SqlOption::Ident(ident) => {
|
||||
write!(f, "{}", ident)
|
||||
write!(f, "{ident}")
|
||||
}
|
||||
SqlOption::KeyValue { key: name, value } => {
|
||||
write!(f, "{} = {}", name, value)
|
||||
write!(f, "{name} = {value}")
|
||||
}
|
||||
SqlOption::Partition {
|
||||
column_name,
|
||||
|
@ -7809,7 +7930,7 @@ impl fmt::Display for SqlOption {
|
|||
SqlOption::NamedParenthesizedList(value) => {
|
||||
write!(f, "{} = ", value.key)?;
|
||||
if let Some(key) = &value.name {
|
||||
write!(f, "{}", key)?;
|
||||
write!(f, "{key}")?;
|
||||
}
|
||||
if !value.values.is_empty() {
|
||||
write!(f, "({})", display_comma_separated(&value.values))?
|
||||
|
@ -7866,7 +7987,7 @@ impl fmt::Display for AttachDuckDBDatabaseOption {
|
|||
AttachDuckDBDatabaseOption::ReadOnly(Some(true)) => write!(f, "READ_ONLY true"),
|
||||
AttachDuckDBDatabaseOption::ReadOnly(Some(false)) => write!(f, "READ_ONLY false"),
|
||||
AttachDuckDBDatabaseOption::ReadOnly(None) => write!(f, "READ_ONLY"),
|
||||
AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {}", t),
|
||||
AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {t}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -9389,10 +9510,10 @@ impl fmt::Display for ShowStatementIn {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.clause)?;
|
||||
if let Some(parent_type) = &self.parent_type {
|
||||
write!(f, " {}", parent_type)?;
|
||||
write!(f, " {parent_type}")?;
|
||||
}
|
||||
if let Some(parent_name) = &self.parent_name {
|
||||
write!(f, " {}", parent_name)?;
|
||||
write!(f, " {parent_name}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -9473,7 +9594,7 @@ impl fmt::Display for TableObject {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Self::TableName(table_name) => write!(f, "{table_name}"),
|
||||
Self::TableFunction(func) => write!(f, "FUNCTION {}", func),
|
||||
Self::TableFunction(func) => write!(f, "FUNCTION {func}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -9661,7 +9782,7 @@ pub struct ReturnStatement {
|
|||
impl fmt::Display for ReturnStatement {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match &self.value {
|
||||
Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {}", expr),
|
||||
Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {expr}"),
|
||||
None => write!(f, "RETURN"),
|
||||
}
|
||||
}
|
||||
|
@ -9712,6 +9833,8 @@ impl fmt::Display for NullInclusion {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tokenizer::Location;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
|
@ -10007,4 +10130,16 @@ mod tests {
|
|||
test_steps(OneOrManyWithParens::Many(vec![2]), vec![2], 3);
|
||||
test_steps(OneOrManyWithParens::Many(vec![3, 4]), vec![3, 4], 4);
|
||||
}
|
||||
|
||||
// Tests that the position in the code of an `Ident` does not affect its
|
||||
// ordering.
|
||||
#[test]
|
||||
fn test_ident_ord() {
|
||||
let mut a = Ident::with_span(Span::new(Location::new(1, 1), Location::new(1, 1)), "a");
|
||||
let mut b = Ident::with_span(Span::new(Location::new(2, 2), Location::new(2, 2)), "b");
|
||||
|
||||
assert!(a < b);
|
||||
std::mem::swap(&mut a.span, &mut b.span);
|
||||
assert!(a < b);
|
||||
}
|
||||
}
@ -341,7 +341,7 @@ pub struct Select {
|
|||
/// DISTRIBUTE BY (Hive)
|
||||
pub distribute_by: Vec<Expr>,
|
||||
/// SORT BY (Hive)
|
||||
pub sort_by: Vec<Expr>,
|
||||
pub sort_by: Vec<OrderByExpr>,
|
||||
/// HAVING
|
||||
pub having: Option<Expr>,
|
||||
/// WINDOW AS
|
||||
|
@ -1047,7 +1047,7 @@ impl fmt::Display for ConnectBy {
|
|||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct Setting {
|
||||
pub key: Ident,
|
||||
pub value: Value,
|
||||
pub value: Expr,
|
||||
}
|
||||
|
||||
impl fmt::Display for Setting {
|
||||
|
@ -1183,7 +1183,7 @@ impl fmt::Display for TableIndexHints {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{} {} ", self.hint_type, self.index_type)?;
|
||||
if let Some(for_clause) = &self.for_clause {
|
||||
write!(f, "FOR {} ", for_clause)?;
|
||||
write!(f, "FOR {for_clause} ")?;
|
||||
}
|
||||
write!(f, "({})", display_comma_separated(&self.index_names))
|
||||
}
|
||||
|
@ -1459,7 +1459,7 @@ impl fmt::Display for TableSampleQuantity {
|
|||
}
|
||||
write!(f, "{}", self.value)?;
|
||||
if let Some(unit) = &self.unit {
|
||||
write!(f, " {}", unit)?;
|
||||
write!(f, " {unit}")?;
|
||||
}
|
||||
if self.parenthesized {
|
||||
write!(f, ")")?;
|
||||
|
@ -1552,7 +1552,7 @@ impl fmt::Display for TableSampleBucket {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?;
|
||||
if let Some(on) = &self.on {
|
||||
write!(f, " ON {}", on)?;
|
||||
write!(f, " ON {on}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1561,19 +1561,19 @@ impl fmt::Display for TableSample {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.modifier)?;
|
||||
if let Some(name) = &self.name {
|
||||
write!(f, " {}", name)?;
|
||||
write!(f, " {name}")?;
|
||||
}
|
||||
if let Some(quantity) = &self.quantity {
|
||||
write!(f, " {}", quantity)?;
|
||||
write!(f, " {quantity}")?;
|
||||
}
|
||||
if let Some(seed) = &self.seed {
|
||||
write!(f, " {}", seed)?;
|
||||
write!(f, " {seed}")?;
|
||||
}
|
||||
if let Some(bucket) = &self.bucket {
|
||||
write!(f, " ({})", bucket)?;
|
||||
write!(f, " ({bucket})")?;
|
||||
}
|
||||
if let Some(offset) = &self.offset {
|
||||
write!(f, " OFFSET {}", offset)?;
|
||||
write!(f, " OFFSET {offset}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1651,7 +1651,7 @@ impl fmt::Display for RowsPerMatch {
|
|||
RowsPerMatch::AllRows(mode) => {
|
||||
write!(f, "ALL ROWS PER MATCH")?;
|
||||
if let Some(mode) = mode {
|
||||
write!(f, " {}", mode)?;
|
||||
write!(f, " {mode}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1777,7 +1777,7 @@ impl fmt::Display for MatchRecognizePattern {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
use MatchRecognizePattern::*;
|
||||
match self {
|
||||
Symbol(symbol) => write!(f, "{}", symbol),
|
||||
Symbol(symbol) => write!(f, "{symbol}"),
|
||||
Exclude(symbol) => write!(f, "{{- {symbol} -}}"),
|
||||
Permute(symbols) => write!(f, "PERMUTE({})", display_comma_separated(symbols)),
|
||||
Concat(patterns) => write!(f, "{}", display_separated(patterns, " ")),
|
||||
|
@ -2148,7 +2148,7 @@ impl fmt::Display for TableAliasColumnDef {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.name)?;
|
||||
if let Some(ref data_type) = self.data_type {
|
||||
write!(f, " {}", data_type)?;
|
||||
write!(f, " {data_type}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2398,7 +2398,7 @@ impl fmt::Display for OrderBy {
|
|||
write!(f, " {}", display_comma_separated(exprs))?;
|
||||
}
|
||||
OrderByKind::All(all) => {
|
||||
write!(f, " ALL{}", all)?;
|
||||
write!(f, " ALL{all}")?;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2429,7 +2429,7 @@ impl fmt::Display for OrderByExpr {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}{}", self.expr, self.options)?;
|
||||
if let Some(ref with_fill) = self.with_fill {
|
||||
write!(f, " {}", with_fill)?
|
||||
write!(f, " {with_fill}")?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2452,13 +2452,13 @@ impl fmt::Display for WithFill {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "WITH FILL")?;
|
||||
if let Some(ref from) = self.from {
|
||||
write!(f, " FROM {}", from)?;
|
||||
write!(f, " FROM {from}")?;
|
||||
}
|
||||
if let Some(ref to) = self.to {
|
||||
write!(f, " TO {}", to)?;
|
||||
write!(f, " TO {to}")?;
|
||||
}
|
||||
if let Some(ref step) = self.step {
|
||||
write!(f, " STEP {}", step)?;
|
||||
write!(f, " STEP {step}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2487,7 +2487,7 @@ impl fmt::Display for InterpolateExpr {
|
|||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.column)?;
|
||||
if let Some(ref expr) = self.expr {
|
||||
write!(f, " AS {}", expr)?;
|
||||
write!(f, " AS {expr}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2565,7 +2565,7 @@ impl fmt::Display for LimitClause {
|
|||
Ok(())
|
||||
}
|
||||
LimitClause::OffsetCommaLimit { offset, limit } => {
|
||||
write!(f, " LIMIT {}, {}", offset, limit)
|
||||
write!(f, " LIMIT {offset}, {limit}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2775,12 +2775,12 @@ impl fmt::Display for PipeOperator {
|
|||
write!(f, "DROP {}", display_comma_separated(columns.as_slice()))
|
||||
}
|
||||
PipeOperator::As { alias } => {
|
||||
write!(f, "AS {}", alias)
|
||||
write!(f, "AS {alias}")
|
||||
}
|
||||
PipeOperator::Limit { expr, offset } => {
|
||||
write!(f, "LIMIT {}", expr)?;
|
||||
write!(f, "LIMIT {expr}")?;
|
||||
if let Some(offset) = offset {
|
||||
write!(f, " OFFSET {}", offset)?;
|
||||
write!(f, " OFFSET {offset}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2803,14 +2803,14 @@ impl fmt::Display for PipeOperator {
|
|||
}
|
||||
|
||||
PipeOperator::Where { expr } => {
|
||||
write!(f, "WHERE {}", expr)
|
||||
write!(f, "WHERE {expr}")
|
||||
}
|
||||
PipeOperator::OrderBy { exprs } => {
|
||||
write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice()))
|
||||
}
|
||||
|
||||
PipeOperator::TableSample { sample } => {
|
||||
write!(f, "{}", sample)
|
||||
write!(f, "{sample}")
|
||||
}
|
||||
PipeOperator::Rename { mappings } => {
|
||||
write!(f, "RENAME {}", display_comma_separated(mappings))
|
||||
|
@ -3169,7 +3169,7 @@ pub enum FormatClause {
|
|||
impl fmt::Display for FormatClause {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
FormatClause::Identifier(ident) => write!(f, "FORMAT {}", ident),
|
||||
FormatClause::Identifier(ident) => write!(f, "FORMAT {ident}"),
|
||||
FormatClause::Null => write!(f, "FORMAT NULL"),
|
||||
}
|
||||
}
|
||||
|
@ -3231,9 +3231,9 @@ impl fmt::Display for ForClause {
|
|||
without_array_wrapper,
|
||||
} => {
|
||||
write!(f, "FOR JSON ")?;
|
||||
write!(f, "{}", for_json)?;
|
||||
write!(f, "{for_json}")?;
|
||||
if let Some(root) = root {
|
||||
write!(f, ", ROOT('{}')", root)?;
|
||||
write!(f, ", ROOT('{root}')")?;
|
||||
}
|
||||
if *include_null_values {
|
||||
write!(f, ", INCLUDE_NULL_VALUES")?;
|
||||
|
@ -3251,7 +3251,7 @@ impl fmt::Display for ForClause {
|
|||
r#type,
|
||||
} => {
|
||||
write!(f, "FOR XML ")?;
|
||||
write!(f, "{}", for_xml)?;
|
||||
write!(f, "{for_xml}")?;
|
||||
if *binary_base64 {
|
||||
write!(f, ", BINARY BASE64")?;
|
||||
}
|
||||
|
@ -3259,7 +3259,7 @@ impl fmt::Display for ForClause {
|
|||
write!(f, ", TYPE")?;
|
||||
}
|
||||
if let Some(root) = root {
|
||||
write!(f, ", ROOT('{}')", root)?;
|
||||
write!(f, ", ROOT('{root}')")?;
|
||||
}
|
||||
if *elements {
|
||||
write!(f, ", ELEMENTS")?;
|
||||
|
@ -3286,7 +3286,7 @@ impl fmt::Display for ForXml {
|
|||
ForXml::Raw(root) => {
|
||||
write!(f, "RAW")?;
|
||||
if let Some(root) = root {
|
||||
write!(f, "('{}')", root)?;
|
||||
write!(f, "('{root}')")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -3295,7 +3295,7 @@ impl fmt::Display for ForXml {
|
|||
ForXml::Path(root) => {
|
||||
write!(f, "PATH")?;
|
||||
if let Some(root) = root {
|
||||
write!(f, "('{}')", root)?;
|
||||
write!(f, "('{root}')")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -3358,7 +3358,7 @@ impl fmt::Display for JsonTableColumn {
|
|||
JsonTableColumn::Named(json_table_named_column) => {
|
||||
write!(f, "{json_table_named_column}")
|
||||
}
|
||||
JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident),
|
||||
JsonTableColumn::ForOrdinality(ident) => write!(f, "{ident} FOR ORDINALITY"),
|
||||
JsonTableColumn::Nested(json_table_nested_column) => {
|
||||
write!(f, "{json_table_nested_column}")
|
||||
}
|
||||
|
@ -3424,10 +3424,10 @@ impl fmt::Display for JsonTableNamedColumn {
|
|||
self.path
|
||||
)?;
|
||||
if let Some(on_empty) = &self.on_empty {
|
||||
write!(f, " {} ON EMPTY", on_empty)?;
|
||||
write!(f, " {on_empty} ON EMPTY")?;
|
||||
}
|
||||
if let Some(on_error) = &self.on_error {
|
||||
write!(f, " {} ON ERROR", on_error)?;
|
||||
write!(f, " {on_error} ON ERROR")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -3449,7 +3449,7 @@ impl fmt::Display for JsonTableColumnErrorHandling {
|
|||
match self {
|
||||
JsonTableColumnErrorHandling::Null => write!(f, "NULL"),
|
||||
JsonTableColumnErrorHandling::Default(json_string) => {
|
||||
write!(f, "DEFAULT {}", json_string)
|
||||
write!(f, "DEFAULT {json_string}")
|
||||
}
|
||||
JsonTableColumnErrorHandling::Error => write!(f, "ERROR"),
|
||||
}
|
||||
|
@ -3491,15 +3491,19 @@ impl fmt::Display for OpenJsonTableColumn {
|
|||
}
|
||||
|
||||
/// BigQuery supports ValueTables which have 2 modes:
/// `SELECT AS STRUCT`
/// `SELECT AS VALUE`
/// `SELECT [ALL | DISTINCT] AS STRUCT`
/// `SELECT [ALL | DISTINCT] AS VALUE`
///
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#value_tables>
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_list>
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum ValueTableMode {
AsStruct,
AsValue,
DistinctAsStruct,
DistinctAsValue,
}

impl fmt::Display for ValueTableMode {

@@ -3507,6 +3511,8 @@ impl fmt::Display for ValueTableMode {
match self {
ValueTableMode::AsStruct => write!(f, "AS STRUCT"),
ValueTableMode::AsValue => write!(f, "AS VALUE"),
ValueTableMode::DistinctAsStruct => write!(f, "DISTINCT AS STRUCT"),
ValueTableMode::DistinctAsValue => write!(f, "DISTINCT AS VALUE"),
}
}
}
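A small sketch (not from this diff) of parsing the BigQuery value-table prefixes that map to the new `ValueTableMode` variants; the column names are arbitrary:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // AS STRUCT / DISTINCT AS VALUE select the value-table mode of the query.
    for sql in [
        "SELECT AS STRUCT 1 AS a, 2 AS b",
        "SELECT DISTINCT AS VALUE 1 AS a",
    ] {
        let statements = Parser::parse_sql(&BigQueryDialect {}, sql).expect("parse failed");
        println!("{}", statements[0]);
    }
}
```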
|
||||
|
@ -3576,12 +3582,12 @@ impl fmt::Display for XmlTableColumn {
|
|||
default,
|
||||
nullable,
|
||||
} => {
|
||||
write!(f, " {}", r#type)?;
|
||||
write!(f, " {type}")?;
|
||||
if let Some(p) = path {
|
||||
write!(f, " PATH {}", p)?;
|
||||
write!(f, " PATH {p}")?;
|
||||
}
|
||||
if let Some(d) = default {
|
||||
write!(f, " DEFAULT {}", d)?;
|
||||
write!(f, " DEFAULT {d}")?;
|
||||
}
|
||||
if !*nullable {
|
||||
write!(f, " NOT NULL")?;
|
||||
|
@ -3612,7 +3618,7 @@ impl fmt::Display for XmlPassingArgument {
|
|||
}
|
||||
write!(f, "{}", self.expr)?;
|
||||
if let Some(alias) = &self.alias {
|
||||
write!(f, " AS {}", alias)?;
|
||||
write!(f, " AS {alias}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
@ -15,7 +15,7 @@
|
|||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use crate::ast::query::SelectItemQualifiedWildcardKind;
|
||||
use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions};
|
||||
use core::iter;
|
||||
|
||||
use crate::tokenizer::Span;
|
||||
|
@ -28,16 +28,17 @@ use super::{
|
|||
ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
|
||||
Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
|
||||
Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
|
||||
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate,
|
||||
InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView,
|
||||
LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, NamedWindowDefinition,
|
||||
ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OpenStatement,
|
||||
OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, Query,
|
||||
RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, ReplaceSelectElement,
|
||||
ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript,
|
||||
SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject,
|
||||
TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef,
|
||||
WhileStatement, WildcardAdditionalOptions, With, WithFill,
|
||||
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert,
|
||||
Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem,
|
||||
LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList,
|
||||
NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction,
|
||||
OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource,
|
||||
ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction,
|
||||
RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem,
|
||||
SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef,
|
||||
TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins,
|
||||
UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement,
|
||||
WildcardAdditionalOptions, With, WithFill,
|
||||
};
|
||||
|
||||
/// Given an iterator of spans, return the [Span::union] of all spans.
|
||||
|
@ -311,7 +312,6 @@ impl Spanned for Statement {
|
|||
table_names,
|
||||
partitions,
|
||||
table: _,
|
||||
only: _,
|
||||
identity: _,
|
||||
cascade: _,
|
||||
on_cluster: _,
|
||||
|
@ -651,7 +651,7 @@ impl Spanned for TableConstraint {
|
|||
name.iter()
|
||||
.map(|i| i.span)
|
||||
.chain(index_name.iter().map(|i| i.span))
|
||||
.chain(columns.iter().map(|i| i.span))
|
||||
.chain(columns.iter().map(|i| i.span()))
|
||||
.chain(characteristics.iter().map(|i| i.span())),
|
||||
),
|
||||
TableConstraint::PrimaryKey {
|
||||
|
@ -665,12 +665,13 @@ impl Spanned for TableConstraint {
|
|||
name.iter()
|
||||
.map(|i| i.span)
|
||||
.chain(index_name.iter().map(|i| i.span))
|
||||
.chain(columns.iter().map(|i| i.span))
|
||||
.chain(columns.iter().map(|i| i.span()))
|
||||
.chain(characteristics.iter().map(|i| i.span())),
|
||||
),
|
||||
TableConstraint::ForeignKey {
|
||||
name,
|
||||
columns,
|
||||
index_name,
|
||||
foreign_table,
|
||||
referred_columns,
|
||||
on_delete,
|
||||
|
@ -679,6 +680,7 @@ impl Spanned for TableConstraint {
|
|||
} => union_spans(
|
||||
name.iter()
|
||||
.map(|i| i.span)
|
||||
.chain(index_name.iter().map(|i| i.span))
|
||||
.chain(columns.iter().map(|i| i.span))
|
||||
.chain(core::iter::once(foreign_table.span()))
|
||||
.chain(referred_columns.iter().map(|i| i.span))
|
||||
|
@ -686,9 +688,11 @@ impl Spanned for TableConstraint {
|
|||
.chain(on_update.iter().map(|i| i.span()))
|
||||
.chain(characteristics.iter().map(|i| i.span())),
|
||||
),
|
||||
TableConstraint::Check { name, expr } => {
|
||||
expr.span().union_opt(&name.as_ref().map(|i| i.span))
|
||||
}
|
||||
TableConstraint::Check {
|
||||
name,
|
||||
expr,
|
||||
enforced: _,
|
||||
} => expr.span().union_opt(&name.as_ref().map(|i| i.span)),
|
||||
TableConstraint::Index {
|
||||
display_as_key: _,
|
||||
name,
|
||||
|
@ -697,7 +701,7 @@ impl Spanned for TableConstraint {
|
|||
} => union_spans(
|
||||
name.iter()
|
||||
.map(|i| i.span)
|
||||
.chain(columns.iter().map(|i| i.span)),
|
||||
.chain(columns.iter().map(|i| i.span())),
|
||||
),
|
||||
TableConstraint::FulltextOrSpatial {
|
||||
fulltext: _,
|
||||
|
@ -708,7 +712,7 @@ impl Spanned for TableConstraint {
|
|||
opt_index_name
|
||||
.iter()
|
||||
.map(|i| i.span)
|
||||
.chain(columns.iter().map(|i| i.span)),
|
||||
.chain(columns.iter().map(|i| i.span())),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -742,6 +746,12 @@ impl Spanned for CreateIndex {
|
|||
}
|
||||
}
|
||||
|
||||
impl Spanned for IndexColumn {
|
||||
fn span(&self) -> Span {
|
||||
self.column.span()
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned for CaseStatement {
|
||||
fn span(&self) -> Span {
|
||||
let CaseStatement {
|
||||
|
@ -981,10 +991,13 @@ impl Spanned for ViewColumnDef {
|
|||
options,
|
||||
} = self;
|
||||
|
||||
union_spans(
|
||||
core::iter::once(name.span)
|
||||
.chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))),
|
||||
)
|
||||
name.span.union_opt(&options.as_ref().map(|o| o.span()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned for ColumnOptions {
|
||||
fn span(&self) -> Span {
|
||||
union_spans(self.as_slice().iter().map(|i| i.span()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1045,7 +1058,9 @@ impl Spanned for CreateTableOptions {
|
|||
match self {
|
||||
CreateTableOptions::None => Span::empty(),
|
||||
CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||
CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||
CreateTableOptions::Options(vec) => {
|
||||
union_spans(vec.as_slice().iter().map(|i| i.span()))
|
||||
}
|
||||
CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||
CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||
}
|
||||
|
@ -1112,6 +1127,7 @@ impl Spanned for AlterTableOperation {
|
|||
.union_opt(&with_name.as_ref().map(|n| n.span)),
|
||||
AlterTableOperation::DropPrimaryKey => Span::empty(),
|
||||
AlterTableOperation::DropForeignKey { name } => name.span,
|
||||
AlterTableOperation::DropIndex { name } => name.span,
|
||||
AlterTableOperation::EnableAlwaysRule { name } => name.span,
|
||||
AlterTableOperation::EnableAlwaysTrigger { name } => name.span,
|
||||
AlterTableOperation::EnableReplicaRule { name } => name.span,
|
||||
|
@ -1566,18 +1582,24 @@ impl Spanned for Expr {
|
|||
),
|
||||
Expr::Prefixed { value, .. } => value.span(),
|
||||
Expr::Case {
|
||||
case_token,
|
||||
end_token,
|
||||
operand,
|
||||
conditions,
|
||||
else_result,
|
||||
} => union_spans(
|
||||
operand
|
||||
.as_ref()
|
||||
.map(|i| i.span())
|
||||
.into_iter()
|
||||
.chain(conditions.iter().flat_map(|case_when| {
|
||||
[case_when.condition.span(), case_when.result.span()]
|
||||
}))
|
||||
.chain(else_result.as_ref().map(|i| i.span())),
|
||||
iter::once(case_token.0.span)
|
||||
.chain(
|
||||
operand
|
||||
.as_ref()
|
||||
.map(|i| i.span())
|
||||
.into_iter()
|
||||
.chain(conditions.iter().flat_map(|case_when| {
|
||||
[case_when.condition.span(), case_when.result.span()]
|
||||
}))
|
||||
.chain(else_result.as_ref().map(|i| i.span())),
|
||||
)
|
||||
.chain(iter::once(end_token.0.span)),
|
||||
),
|
||||
Expr::Exists { subquery, .. } => subquery.span(),
|
||||
Expr::Subquery(query) => query.span(),
|
||||
|
@ -2463,4 +2485,16 @@ pub mod tests {
|
|||
|
||||
assert_eq!(test.get_source(body_span), "SELECT cte.* FROM cte");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_case_expr_span() {
|
||||
let dialect = &GenericDialect;
|
||||
let mut test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END");
|
||||
let expr = test.0.parse_expr().unwrap();
|
||||
let expr_span = expr.span();
|
||||
assert_eq!(
|
||||
test.get_source(expr_span),
|
||||
"CASE 1 WHEN 2 THEN 3 ELSE 4 END"
|
||||
);
|
||||
}
|
||||
}
@ -116,7 +116,6 @@ impl From<ValueWithSpan> for Value {
|
|||
derive(Visit, VisitMut),
|
||||
visit(with = "visit_value")
|
||||
)]
|
||||
|
||||
pub enum Value {
|
||||
/// Numeric literal
|
||||
#[cfg(not(feature = "bigdecimal"))]
|
||||
|
@@ -551,16 +550,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
write!(f, r#"\\"#)?;
}
x if x.is_ascii() => {
write!(f, "{}", c)?;
write!(f, "{c}")?;
}
_ => {
let codepoint = c as u32;
// if the character fits in 32 bits, we can use the \XXXX format
// otherwise, we need to use the \+XXXXXX format
if codepoint <= 0xFFFF {
write!(f, "\\{:04X}", codepoint)?;
write!(f, "\\{codepoint:04X}")?;
} else {
write!(f, "\\+{:06X}", codepoint)?;
write!(f, "\\+{codepoint:06X}")?;
}
}
}
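A standalone sketch of the escape rule above (not the crate's own API): code points up to U+FFFF use `\XXXX`, larger ones use `\+XXXXXX`.

```rust
// Mirrors the branching in the Display impl above for illustration only.
fn escape_unicode(c: char) -> String {
    let codepoint = c as u32;
    if codepoint <= 0xFFFF {
        format!("\\{codepoint:04X}")
    } else {
        format!("\\+{codepoint:06X}")
    }
}

fn main() {
    assert_eq!(escape_unicode('é'), "\\00E9"); // U+00E9 fits in four hex digits
    assert_eq!(escape_unicode('😀'), "\\+01F600"); // U+1F600 needs the long form
}
```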
@ -926,10 +926,10 @@ mod tests {
|
|||
#[test]
|
||||
fn overflow() {
|
||||
let cond = (0..1000)
|
||||
.map(|n| format!("X = {}", n))
|
||||
.map(|n| format!("X = {n}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" OR ");
|
||||
let sql = format!("SELECT x where {0}", cond);
|
||||
let sql = format!("SELECT x where {cond}");
|
||||
|
||||
let dialect = GenericDialect {};
|
||||
let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap();
@ -46,7 +46,11 @@ pub struct BigQueryDialect;
|
|||
|
||||
impl Dialect for BigQueryDialect {
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
self.maybe_parse_statement(parser)
|
||||
if parser.parse_keyword(Keyword::BEGIN) {
|
||||
return Some(parser.parse_begin_exception_end());
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
|
||||
|
@ -141,48 +145,3 @@ impl Dialect for BigQueryDialect {
|
|||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl BigQueryDialect {
|
||||
fn maybe_parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.peek_keyword(Keyword::BEGIN) {
|
||||
return Some(self.parse_begin(parser));
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Parse a `BEGIN` statement.
|
||||
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
|
||||
fn parse_begin(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
parser.expect_keyword(Keyword::BEGIN)?;
|
||||
|
||||
let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
|
||||
|
||||
let has_exception_when_clause = parser.parse_keywords(&[
|
||||
Keyword::EXCEPTION,
|
||||
Keyword::WHEN,
|
||||
Keyword::ERROR,
|
||||
Keyword::THEN,
|
||||
]);
|
||||
let exception_statements = if has_exception_when_clause {
|
||||
if !parser.peek_keyword(Keyword::END) {
|
||||
Some(parser.parse_statement_list(&[Keyword::END])?)
|
||||
} else {
|
||||
Some(Default::default())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
parser.expect_keyword(Keyword::END)?;
|
||||
|
||||
Ok(Statement::StartTransaction {
|
||||
begin: true,
|
||||
statements,
|
||||
exception_statements,
|
||||
has_end_keyword: true,
|
||||
transaction: None,
|
||||
modifier: None,
|
||||
modes: Default::default(),
|
||||
})
|
||||
}
|
||||
}
|
|
|||
true
|
||||
}
|
||||
|
||||
fn supports_left_associative_joins_without_parens(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_connect_by(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
|
|
@@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
false
}

/// Indicates whether the dialect supports left-associative join parsing
/// by default when parentheses are omitted in nested joins.
///
/// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
/// so a query like:
///
/// ```sql
/// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
/// ```
/// is interpreted as:
/// ```sql
/// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
/// ```
/// and internally represented as a **flat list** of joins.
///
/// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
/// precedence and interpret the same query as:
/// ```sql
/// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
/// ```
/// which results in a **nested join** structure in the AST.
///
/// If this method returns `false`, the parser must build nested join trees
/// even in the absence of parentheses to reflect the correct associativity.
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
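A minimal sketch (not part of this diff) contrasting the two behaviours described in the doc comment above; it reuses the placeholder table names from the comment and substitutes `ON 1 = 1` for the elided join condition:

```rust
use sqlparser::ast::{SetExpr, Statement};
use sqlparser::dialect::{GenericDialect, SnowflakeDialect};
use sqlparser::parser::Parser;

// Prints the FROM clause produced for each dialect: Generic keeps a flat list
// of joins, while Snowflake builds a nested join table factor.
fn show_from(dialect: &dyn sqlparser::dialect::Dialect, label: &str) {
    let sql = "SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON 1 = 1";
    let statements = Parser::parse_sql(dialect, sql).expect("parse failed");
    if let Statement::Query(query) = &statements[0] {
        if let SetExpr::Select(select) = query.body.as_ref() {
            println!("{label}: {:?}", select.from);
        }
    }
}

fn main() {
    show_from(&GenericDialect {}, "generic");
    show_from(&SnowflakeDialect {}, "snowflake");
}
```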
|
||||
|
||||
/// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
|
||||
fn supports_outer_join_operator(&self) -> bool {
|
||||
false
|
||||
|
@ -587,7 +615,7 @@ pub trait Dialect: Debug + Any {
|
|||
}
|
||||
|
||||
let token = parser.peek_token();
|
||||
debug!("get_next_precedence_full() {:?}", token);
|
||||
debug!("get_next_precedence_full() {token:?}");
|
||||
match token.token {
|
||||
Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)),
|
||||
Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)),
|
||||
|
@ -1028,6 +1056,10 @@ pub trait Dialect: Debug + Any {
|
|||
fn supports_set_names(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn supports_space_separated_column_options(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// This represents the operators for which precedence must be defined
|
||||
|
|
|
@ -104,7 +104,7 @@ impl Dialect for PostgreSqlDialect {
|
|||
|
||||
fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
|
||||
let token = parser.peek_token();
|
||||
debug!("get_next_precedence() {:?}", token);
|
||||
debug!("get_next_precedence() {token:?}");
|
||||
|
||||
// we only return some custom value here when the behaviour (not merely the numeric value) differs
|
||||
// from the default implementation
|
||||
|
|
|
@ -131,6 +131,10 @@ impl Dialect for SnowflakeDialect {
|
|||
}
|
||||
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.parse_keyword(Keyword::BEGIN) {
|
||||
return Some(parser.parse_begin_exception_end());
|
||||
}
|
||||
|
||||
if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
|
||||
// ALTER SESSION
|
||||
let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
|
||||
|
@ -279,6 +283,10 @@ impl Dialect for SnowflakeDialect {
|
|||
true
|
||||
}
|
||||
|
||||
fn supports_left_associative_joins_without_parens(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
|
||||
// Unreserve some keywords that Snowflake accepts as identifiers
|
||||
// See: https://docs.snowflake.com/en/sql-reference/reserved-keywords
|
||||
|
@ -352,6 +360,10 @@ impl Dialect for SnowflakeDialect {
|
|||
fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
|
||||
&RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
|
||||
}
|
||||
|
||||
fn supports_space_separated_column_options(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
|
@ -453,7 +465,7 @@ pub fn parse_create_table(
|
|||
parser.expect_keyword_is(Keyword::BY)?;
|
||||
parser.expect_token(&Token::LParen)?;
|
||||
let cluster_by = Some(WrappedCollection::Parentheses(
|
||||
parser.parse_comma_separated(|p| p.parse_identifier())?,
|
||||
parser.parse_comma_separated(|p| p.parse_expr())?,
|
||||
));
|
||||
parser.expect_token(&Token::RParen)?;
|
||||
|
||||
|
@ -560,6 +572,9 @@ pub fn parse_create_table(
|
|||
builder.storage_serialization_policy =
|
||||
Some(parse_storage_serialization_policy(parser)?);
|
||||
}
|
||||
Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
|
||||
builder = builder.if_not_exists(true);
|
||||
}
|
||||
_ => {
|
||||
return parser.expected("end of statement", next_token);
|
||||
}
|
||||
|
|
|
@ -1,3 +1,20 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! Utilities for formatting SQL AST nodes with pretty printing support.
|
||||
//!
|
||||
//! The module provides formatters that implement the `Display` trait with support
|
||||
|
|
|
@ -395,6 +395,7 @@ define_keywords!(
|
|||
FUNCTION,
|
||||
FUNCTIONS,
|
||||
FUSION,
|
||||
FUTURE,
|
||||
GENERAL,
|
||||
GENERATE,
|
||||
GENERATED,
|
||||
|
@ -646,6 +647,7 @@ define_keywords!(
|
|||
ORDER,
|
||||
ORDINALITY,
|
||||
ORGANIZATION,
|
||||
OTHER,
|
||||
OUT,
|
||||
OUTER,
|
||||
OUTPUT,
|
||||
|
@ -934,6 +936,8 @@ define_keywords!(
|
|||
TRY,
|
||||
TRY_CAST,
|
||||
TRY_CONVERT,
|
||||
TSQUERY,
|
||||
TSVECTOR,
|
||||
TUPLE,
|
||||
TYPE,
|
||||
UBIGINT,
|
||||
|
|
|
@ -436,7 +436,7 @@ impl<'a> Parser<'a> {
|
|||
///
|
||||
/// See example on [`Parser::new()`] for an example
|
||||
pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
|
||||
debug!("Parsing sql '{}'...", sql);
|
||||
debug!("Parsing sql '{sql}'...");
|
||||
let tokens = Tokenizer::new(self.dialect, sql)
|
||||
.with_unescape(self.options.unescape)
|
||||
.tokenize_with_location()?;
|
||||
|
@ -960,12 +960,13 @@ impl<'a> Parser<'a> {
|
|||
|
||||
pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
|
||||
let table = self.parse_keyword(Keyword::TABLE);
|
||||
let only = self.parse_keyword(Keyword::ONLY);
|
||||
|
||||
let table_names = self
|
||||
.parse_comma_separated(|p| p.parse_object_name(false))?
|
||||
.parse_comma_separated(|p| {
|
||||
Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
|
||||
})?
|
||||
.into_iter()
|
||||
.map(|n| TruncateTableTarget { name: n })
|
||||
.map(|(only, name)| TruncateTableTarget { name, only })
|
||||
.collect();
|
||||
|
||||
let mut partitions = None;
|
||||
|
@ -996,7 +997,6 @@ impl<'a> Parser<'a> {
|
|||
table_names,
|
||||
partitions,
|
||||
table,
|
||||
only,
|
||||
identity,
|
||||
cascade,
|
||||
on_cluster,
|
||||
|
@ -1226,10 +1226,10 @@ impl<'a> Parser<'a> {
|
|||
|
||||
expr = self.parse_compound_expr(expr, vec![])?;
|
||||
|
||||
debug!("prefix: {:?}", expr);
|
||||
debug!("prefix: {expr:?}");
|
||||
loop {
|
||||
let next_precedence = self.get_next_precedence()?;
|
||||
debug!("next precedence: {:?}", next_precedence);
|
||||
debug!("next precedence: {next_precedence:?}");
|
||||
|
||||
if precedence >= next_precedence {
|
||||
break;
|
||||
|
@ -1631,8 +1631,7 @@ impl<'a> Parser<'a> {
|
|||
Token::QuestionPipe => UnaryOperator::QuestionPipe,
|
||||
_ => {
|
||||
return Err(ParserError::ParserError(format!(
|
||||
"Unexpected token in unary operator parsing: {:?}",
|
||||
tok
|
||||
"Unexpected token in unary operator parsing: {tok:?}"
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
@ -2274,6 +2273,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
|
||||
pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
|
||||
let case_token = AttachedToken(self.get_current_token().clone());
|
||||
let mut operand = None;
|
||||
if !self.parse_keyword(Keyword::WHEN) {
|
||||
operand = Some(Box::new(self.parse_expr()?));
|
||||
|
@ -2294,8 +2294,10 @@ impl<'a> Parser<'a> {
|
|||
} else {
|
||||
None
|
||||
};
|
||||
self.expect_keyword_is(Keyword::END)?;
|
||||
let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
|
||||
Ok(Expr::Case {
|
||||
case_token,
|
||||
end_token,
|
||||
operand,
|
||||
conditions,
|
||||
else_result,
|
||||
|
@ -2768,7 +2770,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
if self.dialect.supports_dictionary_syntax() {
|
||||
self.prev_token(); // Put back the '{'
|
||||
return self.parse_duckdb_struct_literal();
|
||||
return self.parse_dictionary();
|
||||
}
|
||||
|
||||
self.expected("an expression", token)
|
||||
|
@ -3031,7 +3033,6 @@ impl<'a> Parser<'a> {
|
|||
where
|
||||
F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
|
||||
{
|
||||
let start_token = self.peek_token();
|
||||
self.expect_keyword_is(Keyword::STRUCT)?;
|
||||
|
||||
// Nothing to do if we have no type information.
|
||||
|
@ -3044,16 +3045,10 @@ impl<'a> Parser<'a> {
|
|||
let trailing_bracket = loop {
|
||||
let (def, trailing_bracket) = elem_parser(self)?;
|
||||
field_defs.push(def);
|
||||
if !self.consume_token(&Token::Comma) {
|
||||
// The struct field definition is finished if it occurs `>>` or comma.
|
||||
if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
|
||||
break trailing_bracket;
|
||||
}
|
||||
|
||||
// Angle brackets are balanced so we only expect the trailing `>>` after
|
||||
// we've matched all field types for the current struct.
|
||||
// e.g. this is invalid syntax `STRUCT<STRUCT<INT>>>, INT>(NULL)`
|
||||
if trailing_bracket.0 {
|
||||
return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
|
||||
}
|
||||
};
|
||||
|
||||
Ok((
|
||||
|
@ -3073,6 +3068,7 @@ impl<'a> Parser<'a> {
|
|||
Ok(StructField {
|
||||
field_name: Some(field_name),
|
||||
field_type,
|
||||
options: None,
|
||||
})
|
||||
});
|
||||
self.expect_token(&Token::RParen)?;
|
||||
|
@ -3106,10 +3102,12 @@ impl<'a> Parser<'a> {
|
|||
|
||||
let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
|
||||
|
||||
let options = self.maybe_parse_options(Keyword::OPTIONS)?;
|
||||
Ok((
|
||||
StructField {
|
||||
field_name,
|
||||
field_type,
|
||||
options,
|
||||
},
|
||||
trailing_bracket,
|
||||
))
|
||||
|
@@ -3141,7 +3139,7 @@ impl<'a> Parser<'a> {
Ok(fields)
}

/// DuckDB specific: Parse a duckdb [dictionary]
/// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
///
/// Syntax:
///

@@ -3150,18 +3148,18 @@ impl<'a> Parser<'a> {
/// ```
///
/// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
fn parse_duckdb_struct_literal(&mut self) -> Result<Expr, ParserError> {
/// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
self.expect_token(&Token::LBrace)?;

let fields =
self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?;
let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;

self.expect_token(&Token::RBrace)?;

Ok(Expr::Dictionary(fields))
}
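A hedged sketch (not from this diff) of the brace dictionary literal that `parse_dictionary` handles; the same syntax now also backs ClickHouse map-valued `SETTINGS` per the link above, though whether a given dialect accepts it depends on its dictionary-syntax flag. The keys and values here are arbitrary:

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    // DuckDB struct/dictionary literal parsed into Expr::Dictionary.
    let sql = "SELECT {'a': 1, 'b': 2}";
    let statements = Parser::parse_sql(&DuckDbDialect {}, sql).expect("parse failed");
    println!("{}", statements[0]);
}
```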
|
||||
|
||||
/// Parse a field for a duckdb [dictionary]
|
||||
/// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
|
||||
///
|
||||
/// Syntax
|
||||
///
|
||||
|
@ -3170,7 +3168,8 @@ impl<'a> Parser<'a> {
|
|||
/// ```
|
||||
///
|
||||
/// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
|
||||
fn parse_duckdb_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
|
||||
/// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
|
||||
fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
|
||||
let key = self.parse_identifier()?;
|
||||
|
||||
self.expect_token(&Token::Colon)?;
|
||||
|
@ -3818,7 +3817,7 @@ impl<'a> Parser<'a> {
|
|||
});
|
||||
}
|
||||
self.expect_token(&Token::LParen)?;
|
||||
let in_op = match self.maybe_parse(|p| p.parse_query_body(p.dialect.prec_unknown()))? {
|
||||
let in_op = match self.maybe_parse(|p| p.parse_query())? {
|
||||
Some(subquery) => Expr::InSubquery {
|
||||
expr: Box::new(expr),
|
||||
subquery,
|
||||
|
@ -4859,6 +4858,12 @@ impl<'a> Parser<'a> {
|
|||
None
|
||||
};
|
||||
|
||||
let with = if self.peek_keyword(Keyword::WITH) {
|
||||
Some(self.parse_options(Keyword::WITH)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let options = if self.peek_keyword(Keyword::OPTIONS) {
|
||||
Some(self.parse_options(Keyword::OPTIONS)?)
|
||||
} else {
|
||||
|
@ -4868,6 +4873,7 @@ impl<'a> Parser<'a> {
|
|||
Ok(Statement::CreateSchema {
|
||||
schema_name,
|
||||
if_not_exists,
|
||||
with,
|
||||
options,
|
||||
default_collate_spec,
|
||||
})
|
||||
|
@ -6862,9 +6868,7 @@ impl<'a> Parser<'a> {
|
|||
None
|
||||
};
|
||||
|
||||
self.expect_token(&Token::LParen)?;
|
||||
let columns = self.parse_comma_separated(Parser::parse_create_index_expr)?;
|
||||
self.expect_token(&Token::RParen)?;
|
||||
let columns = self.parse_parenthesized_index_column_list()?;
|
||||
|
||||
let include = if self.parse_keyword(Keyword::INCLUDE) {
|
||||
self.expect_token(&Token::LParen)?;
|
||||
|
@ -7306,7 +7310,7 @@ impl<'a> Parser<'a> {
|
|||
if dialect_of!(self is BigQueryDialect | GenericDialect) {
|
||||
if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
|
||||
cluster_by = Some(WrappedCollection::NoWrapping(
|
||||
self.parse_comma_separated(|p| p.parse_identifier())?,
|
||||
self.parse_comma_separated(|p| p.parse_expr())?,
|
||||
));
|
||||
};
|
||||
|
||||
|
@ -7620,9 +7624,22 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
|
||||
pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
|
||||
let mode = if self.parse_keyword(Keyword::IN) {
|
||||
Some(ArgMode::In)
|
||||
} else if self.parse_keyword(Keyword::OUT) {
|
||||
Some(ArgMode::Out)
|
||||
} else if self.parse_keyword(Keyword::INOUT) {
|
||||
Some(ArgMode::InOut)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let name = self.parse_identifier()?;
|
||||
let data_type = self.parse_data_type()?;
|
||||
Ok(ProcedureParam { name, data_type })
|
||||
Ok(ProcedureParam {
|
||||
name,
|
||||
data_type,
|
||||
mode,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
|
||||
|
@ -8061,10 +8078,10 @@ impl<'a> Parser<'a> {
|
|||
let nulls_distinct = self.parse_optional_nulls_distinct()?;
|
||||
|
||||
// optional index name
|
||||
let index_name = self.parse_optional_indent()?;
|
||||
let index_name = self.parse_optional_ident()?;
|
||||
let index_type = self.parse_optional_using_then_index_type()?;
|
||||
|
||||
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
|
||||
let columns = self.parse_parenthesized_index_column_list()?;
|
||||
let index_options = self.parse_index_options()?;
|
||||
let characteristics = self.parse_constraint_characteristics()?;
|
||||
Ok(Some(TableConstraint::Unique {
|
||||
|
@ -8083,10 +8100,10 @@ impl<'a> Parser<'a> {
|
|||
self.expect_keyword_is(Keyword::KEY)?;
|
||||
|
||||
// optional index name
|
||||
let index_name = self.parse_optional_indent()?;
|
||||
let index_name = self.parse_optional_ident()?;
|
||||
let index_type = self.parse_optional_using_then_index_type()?;
|
||||
|
||||
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
|
||||
let columns = self.parse_parenthesized_index_column_list()?;
|
||||
let index_options = self.parse_index_options()?;
|
||||
let characteristics = self.parse_constraint_characteristics()?;
|
||||
Ok(Some(TableConstraint::PrimaryKey {
|
||||
|
@ -8100,6 +8117,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
Token::Word(w) if w.keyword == Keyword::FOREIGN => {
|
||||
self.expect_keyword_is(Keyword::KEY)?;
|
||||
let index_name = self.parse_optional_ident()?;
|
||||
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
|
||||
self.expect_keyword_is(Keyword::REFERENCES)?;
|
||||
let foreign_table = self.parse_object_name(false)?;
|
||||
|
@ -8122,6 +8140,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
Ok(Some(TableConstraint::ForeignKey {
|
||||
name,
|
||||
index_name,
|
||||
columns,
|
||||
foreign_table,
|
||||
referred_columns,
|
||||
|
@@ -8134,7 +8153,20 @@ impl<'a> Parser<'a> {
self.expect_token(&Token::LParen)?;
let expr = Box::new(self.parse_expr()?);
self.expect_token(&Token::RParen)?;
Ok(Some(TableConstraint::Check { name, expr }))

let enforced = if self.parse_keyword(Keyword::ENFORCED) {
Some(true)
} else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
Some(false)
} else {
None
};

Ok(Some(TableConstraint::Check {
name,
expr,
enforced,
}))
}
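A brief sketch (not from this diff) of the optional enforcement flag the hunk above adds to CHECK table constraints; the table and constraint names are made up:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // MySQL-style [NOT] ENFORCED suffix on a CHECK constraint.
    let sql = "CREATE TABLE t (a INT, CONSTRAINT c1 CHECK (a > 0) NOT ENFORCED)";
    let statements = Parser::parse_sql(&MySqlDialect {}, sql).expect("parse failed");
    println!("{}", statements[0]);
}
```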
|
||||
Token::Word(w)
|
||||
if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
|
||||
|
@ -8145,11 +8177,11 @@ impl<'a> Parser<'a> {
|
|||
|
||||
let name = match self.peek_token().token {
|
||||
Token::Word(word) if word.keyword == Keyword::USING => None,
|
||||
_ => self.parse_optional_indent()?,
|
||||
_ => self.parse_optional_ident()?,
|
||||
};
|
||||
|
||||
let index_type = self.parse_optional_using_then_index_type()?;
|
||||
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
|
||||
let columns = self.parse_parenthesized_index_column_list()?;
|
||||
|
||||
Ok(Some(TableConstraint::Index {
|
||||
display_as_key,
|
||||
|
@ -8176,9 +8208,9 @@ impl<'a> Parser<'a> {
|
|||
|
||||
let index_type_display = self.parse_index_type_display();
|
||||
|
||||
let opt_index_name = self.parse_optional_indent()?;
|
||||
let opt_index_name = self.parse_optional_ident()?;
|
||||
|
||||
let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
|
||||
let columns = self.parse_parenthesized_index_column_list()?;
|
||||
|
||||
Ok(Some(TableConstraint::FulltextOrSpatial {
|
||||
fulltext,
|
||||
|
@ -8286,7 +8318,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// Parse `[ident]`, mostly `ident` is name, like:
|
||||
/// `window_name`, `index_name`, ...
|
||||
pub fn parse_optional_indent(&mut self) -> Result<Option<Ident>, ParserError> {
|
||||
pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
|
||||
self.maybe_parse(|parser| parser.parse_identifier())
|
||||
}
|
||||
|
||||
|
@ -8611,6 +8643,9 @@ impl<'a> Parser<'a> {
|
|||
} else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
|
||||
let name = self.parse_identifier()?;
|
||||
AlterTableOperation::DropForeignKey { name }
|
||||
} else if self.parse_keyword(Keyword::INDEX) {
|
||||
let name = self.parse_identifier()?;
|
||||
AlterTableOperation::DropIndex { name }
|
||||
} else if self.parse_keyword(Keyword::PROJECTION)
|
||||
&& dialect_of!(self is ClickHouseDialect|GenericDialect)
|
||||
{
|
||||
|
@ -9885,6 +9920,12 @@ impl<'a> Parser<'a> {
|
|||
Ok(DataType::Unsigned)
|
||||
}
|
||||
}
|
||||
Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
|
||||
Ok(DataType::TsVector)
|
||||
}
|
||||
Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
|
||||
Ok(DataType::TsQuery)
|
||||
}
|
||||
_ => {
|
||||
self.prev_token();
|
||||
let type_name = self.parse_object_name(false)?;
|
||||
|
@ -10581,17 +10622,7 @@ impl<'a> Parser<'a> {
|
|||
/// Parses a column definition within a view.
|
||||
fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
|
||||
let name = self.parse_identifier()?;
|
||||
let options = if (dialect_of!(self is BigQueryDialect | GenericDialect)
|
||||
&& self.parse_keyword(Keyword::OPTIONS))
|
||||
|| (dialect_of!(self is SnowflakeDialect | GenericDialect)
|
||||
&& self.parse_keyword(Keyword::COMMENT))
|
||||
{
|
||||
self.prev_token();
|
||||
self.parse_optional_column_option()?
|
||||
.map(|option| vec![option])
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let options = self.parse_view_column_options()?;
|
||||
let data_type = if dialect_of!(self is ClickHouseDialect) {
|
||||
Some(self.parse_data_type()?)
|
||||
} else {
|
||||
|
@ -10604,6 +10635,25 @@ impl<'a> Parser<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
|
||||
let mut options = Vec::new();
|
||||
loop {
|
||||
let option = self.parse_optional_column_option()?;
|
||||
if let Some(option) = option {
|
||||
options.push(option);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if options.is_empty() {
|
||||
Ok(None)
|
||||
} else if self.dialect.supports_space_separated_column_options() {
|
||||
Ok(Some(ColumnOptions::SpaceSeparated(options)))
|
||||
} else {
|
||||
Ok(Some(ColumnOptions::CommaSeparated(options)))
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
|
||||
/// For example: `(col1, "col 2", ...)`
|
||||
pub fn parse_parenthesized_column_list(
|
||||
|
@@ -10614,6 +10664,14 @@ impl<'a> Parser<'a> {
self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
}

/// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
/// expressions with ordering information (and an opclass in some dialects).
fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
p.parse_create_index_expr()
})
}
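A minimal sketch (not from this diff) of the kind of index column list this helper accepts, i.e. expressions with ordering rather than bare identifiers; the index, table, and column names are illustrative:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // An expression column plus an explicitly descending column.
    let sql = "CREATE INDEX idx_name ON t(LOWER(name), id DESC)";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).expect("parse failed");
    println!("{}", statements[0]);
}
```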
|
||||
|
||||
/// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
|
||||
/// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
|
||||
pub fn parse_parenthesized_qualified_column_list(
|
||||
|
@ -11326,7 +11384,7 @@ impl<'a> Parser<'a> {
|
|||
let key_values = self.parse_comma_separated(|p| {
|
||||
let key = p.parse_identifier()?;
|
||||
p.expect_token(&Token::Eq)?;
|
||||
let value = p.parse_value()?.value;
|
||||
let value = p.parse_expr()?;
|
||||
Ok(Setting { key, value })
|
||||
})?;
|
||||
Some(key_values)
|
||||
|
@ -11644,18 +11702,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
|
||||
let select_token = self.expect_keyword(Keyword::SELECT)?;
|
||||
let value_table_mode =
|
||||
if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
|
||||
if self.parse_keyword(Keyword::VALUE) {
|
||||
Some(ValueTableMode::AsValue)
|
||||
} else if self.parse_keyword(Keyword::STRUCT) {
|
||||
Some(ValueTableMode::AsStruct)
|
||||
} else {
|
||||
self.expected("VALUE or STRUCT", self.peek_token())?
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let value_table_mode = self.parse_value_table_mode()?;
|
||||
|
||||
let mut top_before_distinct = false;
|
||||
let mut top = None;
|
||||
|
@ -11756,7 +11803,7 @@ impl<'a> Parser<'a> {
|
|||
};
|
||||
|
||||
let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
|
||||
self.parse_comma_separated(Parser::parse_expr)?
|
||||
self.parse_comma_separated(Parser::parse_order_by_expr)?
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
@ -11831,6 +11878,32 @@ impl<'a> Parser<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
|
||||
if !dialect_of!(self is BigQueryDialect) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
|
||||
Some(ValueTableMode::DistinctAsValue)
|
||||
} else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
|
||||
Some(ValueTableMode::DistinctAsStruct)
|
||||
} else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
|
||||
|| self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
|
||||
{
|
||||
Some(ValueTableMode::AsValue)
|
||||
} else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
|
||||
|| self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
|
||||
{
|
||||
Some(ValueTableMode::AsStruct)
|
||||
} else if self.parse_keyword(Keyword::AS) {
|
||||
self.expected("VALUE or STRUCT", self.peek_token())?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(mode)
|
||||
}
|
||||
|
||||
/// Invoke `f` after first setting the parser's `ParserState` to `state`.
|
||||
///
|
||||
/// Upon return, restores the parser's state to what it started at.
|
||||
|
@ -12589,7 +12662,11 @@ impl<'a> Parser<'a> {
|
|||
};
|
||||
let mut relation = self.parse_table_factor()?;
|
||||
|
||||
if self.peek_parens_less_nested_join() {
|
||||
if !self
|
||||
.dialect
|
||||
.supports_left_associative_joins_without_parens()
|
||||
&& self.peek_parens_less_nested_join()
|
||||
{
|
||||
let joins = self.parse_joins()?;
|
||||
relation = TableFactor::NestedJoin {
|
||||
table_with_joins: Box::new(TableWithJoins { relation, joins }),
|
||||
|
@ -13749,7 +13826,7 @@ impl<'a> Parser<'a> {
|
|||
let ident = self.parse_identifier()?;
|
||||
if let GranteeName::ObjectName(namespace) = name {
|
||||
name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
|
||||
format!("{}:{}", namespace, ident),
|
||||
format!("{namespace}:{ident}"),
|
||||
)]));
|
||||
};
|
||||
}
|
||||
|
@ -13786,6 +13863,33 @@ impl<'a> Parser<'a> {
|
|||
Some(GrantObjects::AllTablesInSchema {
|
||||
schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
|
||||
})
|
||||
} else if self.parse_keywords(&[
|
||||
Keyword::FUTURE,
|
||||
Keyword::SCHEMAS,
|
||||
Keyword::IN,
|
||||
Keyword::DATABASE,
|
||||
]) {
|
||||
Some(GrantObjects::FutureSchemasInDatabase {
|
||||
databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
|
||||
})
|
||||
} else if self.parse_keywords(&[
|
||||
Keyword::FUTURE,
|
||||
Keyword::TABLES,
|
||||
Keyword::IN,
|
||||
Keyword::SCHEMA,
|
||||
]) {
|
||||
Some(GrantObjects::FutureTablesInSchema {
|
||||
schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
|
||||
})
|
||||
} else if self.parse_keywords(&[
|
||||
Keyword::FUTURE,
|
||||
Keyword::VIEWS,
|
||||
Keyword::IN,
|
||||
Keyword::SCHEMA,
|
||||
]) {
|
||||
Some(GrantObjects::FutureViewsInSchema {
|
||||
schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
|
||||
})
|
||||
} else if self.parse_keywords(&[
|
||||
Keyword::ALL,
|
||||
Keyword::SEQUENCES,
|
||||
|
@ -14719,7 +14823,7 @@ impl<'a> Parser<'a> {
|
|||
self.dialect
|
||||
.get_reserved_keywords_for_select_item_operator(),
|
||||
)
|
||||
.map(|keyword| Ident::new(format!("{:?}", keyword)));
|
||||
.map(|keyword| Ident::new(format!("{keyword:?}")));
|
||||
|
||||
match self.parse_wildcard_expr()? {
|
||||
Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
|
||||
|
@ -15139,7 +15243,8 @@ impl<'a> Parser<'a> {
|
|||
|
||||
/// Parse a FETCH clause
|
||||
pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
|
||||
self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?;
|
||||
let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
|
||||
|
||||
let (quantity, percent) = if self
|
||||
.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
|
||||
.is_some()
|
||||
|
@ -15148,16 +15253,16 @@ impl<'a> Parser<'a> {
|
|||
} else {
|
||||
let quantity = Expr::Value(self.parse_value()?);
|
||||
let percent = self.parse_keyword(Keyword::PERCENT);
|
||||
self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?;
|
||||
let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
|
||||
(Some(quantity), percent)
|
||||
};
|
||||
|
||||
let with_ties = if self.parse_keyword(Keyword::ONLY) {
|
||||
false
|
||||
} else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
|
||||
true
|
||||
} else {
|
||||
return self.expected("one of ONLY or WITH TIES", self.peek_token());
|
||||
self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
|
||||
};
|
||||
|
||||
Ok(Fetch {
|
||||
with_ties,
|
||||
percent,
|
||||
|
@ -15220,7 +15325,7 @@ impl<'a> Parser<'a> {
|
|||
transaction: Some(BeginTransactionKind::Transaction),
|
||||
modifier: None,
|
||||
statements: vec![],
|
||||
exception_statements: None,
|
||||
exception: None,
|
||||
has_end_keyword: false,
|
||||
})
|
||||
}
|
||||
|
@ -15252,11 +15357,56 @@ impl<'a> Parser<'a> {
|
|||
transaction,
|
||||
modifier,
|
||||
statements: vec![],
|
||||
exception_statements: None,
|
||||
exception: None,
|
||||
has_end_keyword: false,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;

let exception = if self.parse_keyword(Keyword::EXCEPTION) {
let mut when = Vec::new();

// We can have multiple `WHEN` arms so we consume all cases until `END`
while !self.peek_keyword(Keyword::END) {
self.expect_keyword(Keyword::WHEN)?;

// Each `WHEN` case can have one or more conditions, e.g.
// WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
// So we parse identifiers until the `THEN` keyword.
let mut idents = Vec::new();

while !self.parse_keyword(Keyword::THEN) {
let ident = self.parse_identifier()?;
idents.push(ident);

self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
}

let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;

when.push(ExceptionWhen { idents, statements });
}

Some(when)
} else {
None
};

self.expect_keyword(Keyword::END)?;

Ok(Statement::StartTransaction {
begin: true,
statements,
exception,
has_end_keyword: true,
transaction: None,
modifier: None,
modes: Default::default(),
})
}
|
||||
|
||||
pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
|
||||
let modifier = if !self.dialect.supports_end_transaction_modifier() {
|
||||
None
|
||||
|
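
// A minimal sketch of the BEGIN ... EXCEPTION ... END support added above,
// using the statement from the BigQuery tests; BigQueryDialect here is an
// assumption about which dialect routes into parse_begin_exception_end.
fn begin_exception_end_sketch() {
    use sqlparser::ast::Statement;
    use sqlparser::dialect::BigQueryDialect;
    use sqlparser::parser::Parser;

    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let mut statements = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    match statements.remove(0) {
        Statement::StartTransaction { exception, has_end_keyword, .. } => {
            // One `WHEN ERROR THEN ...` arm and an explicit END keyword.
            assert_eq!(exception.unwrap().len(), 1);
            assert!(has_end_keyword);
        }
        _ => unreachable!(),
    }
}
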
@@ -15850,6 +16000,13 @@ impl<'a> Parser<'a> {
pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
let name = self.parse_object_name(false)?;
let params = self.parse_optional_procedure_parameters()?;

let language = if self.parse_keyword(Keyword::LANGUAGE) {
Some(self.parse_identifier()?)
} else {
None
};

self.expect_keyword_is(Keyword::AS)?;

let body = self.parse_conditional_statements(&[Keyword::END])?;
@@ -15858,6 +16015,7 @@ impl<'a> Parser<'a> {
name,
or_alter,
params,
language,
body,
})
}
@@ -15865,7 +16023,7 @@ impl<'a> Parser<'a> {
pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
let window_name = match self.peek_token().token {
Token::Word(word) if word.keyword == Keyword::NoKeyword => {
self.parse_optional_indent()?
self.parse_optional_ident()?
}
_ => None,
};
@@ -16604,6 +16762,20 @@ mod tests {
}};
}

fn mk_expected_col(name: &str) -> IndexColumn {
IndexColumn {
column: OrderByExpr {
expr: Expr::Identifier(name.into()),
options: OrderByOptions {
asc: None,
nulls_first: None,
},
with_fill: None,
},
operator_class: None,
}
}

let dialect =
TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);
@ -16614,7 +16786,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: None,
|
||||
index_type: None,
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16625,7 +16797,7 @@ mod tests {
|
|||
display_as_key: true,
|
||||
name: None,
|
||||
index_type: None,
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16636,7 +16808,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: Some(Ident::with_quote('\'', "index")),
|
||||
index_type: None,
|
||||
columns: vec![Ident::new("c1"), Ident::new("c2")],
|
||||
columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16647,7 +16819,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: None,
|
||||
index_type: Some(IndexType::BTree),
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16658,7 +16830,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: None,
|
||||
index_type: Some(IndexType::Hash),
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16669,7 +16841,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: Some(Ident::new("idx_name")),
|
||||
index_type: Some(IndexType::BTree),
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -16680,7 +16852,7 @@ mod tests {
|
|||
display_as_key: false,
|
||||
name: Some(Ident::new("idx_name")),
|
||||
index_type: Some(IndexType::Hash),
|
||||
columns: vec![Ident::new("c1")],
|
||||
columns: vec![mk_expected_col("c1")],
|
||||
}
|
||||
);
|
||||
}
|
||||
|
|
|
@@ -270,7 +270,7 @@ impl TestedDialects {
tokenizer = tokenizer.with_unescape(options.unescape);
}
let tokens = tokenizer.tokenize().unwrap();
assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect);
assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
});
}
}
@@ -366,6 +366,11 @@ pub fn number(n: &str) -> Value {
Value::Number(n.parse().unwrap(), false)
}

/// Creates a [Value::SingleQuotedString]
pub fn single_quoted_string(s: impl Into<String>) -> Value {
Value::SingleQuotedString(s.into())
}

pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
Some(TableAlias {
name: Ident::new(name),
@@ -448,3 +453,47 @@ pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
within_group: vec![],
})
}

/// Gets the first index column (mysql calls it a key part) of the first index found in a
/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
pub fn index_column(stmt: Statement) -> Expr {
match stmt {
Statement::CreateIndex(CreateIndex { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
Statement::CreateTable(CreateTable { constraints, .. }) => {
match constraints.first().unwrap() {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
AlterTableOperation::AddConstraint(TableConstraint::Index { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
AlterTableOperation::AddConstraint(TableConstraint::Unique { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
AlterTableOperation::AddConstraint(TableConstraint::PrimaryKey { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
AlterTableOperation::AddConstraint(TableConstraint::FulltextOrSpatial {
columns,
..
}) => columns.first().unwrap().column.expr.clone(),
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
},
_ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
}
}
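
// A minimal sketch of how the `index_column` helper above might be used,
// assuming it and the ast types are in scope and that GenericDialect accepts
// the CREATE INDEX form below (both assumptions, not taken from this diff).
fn index_column_sketch() {
    use sqlparser::ast::{Expr, Ident};
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let mut statements =
        Parser::parse_sql(&GenericDialect {}, "CREATE INDEX idx ON t(c1)").unwrap();
    // The first key part of the index is the bare column identifier `c1`.
    let expr = index_column(statements.remove(0));
    assert_eq!(expr, Expr::Identifier(Ident::new("c1")));
}
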
@ -1751,7 +1751,7 @@ impl<'a> Tokenizer<'a> {
|
|||
(None, Some(tok)) => Ok(Some(tok)),
|
||||
(None, None) => self.tokenizer_error(
|
||||
chars.location(),
|
||||
format!("Expected a valid binary operator after '{}'", prefix),
|
||||
format!("Expected a valid binary operator after '{prefix}'"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -1809,7 +1809,7 @@ impl<'a> Tokenizer<'a> {
|
|||
chars.next();
|
||||
|
||||
let mut temp = String::new();
|
||||
let end_delimiter = format!("${}$", value);
|
||||
let end_delimiter = format!("${value}$");
|
||||
|
||||
loop {
|
||||
match chars.next() {
|
||||
|
@ -2402,13 +2402,13 @@ fn take_char_from_hex_digits(
|
|||
location: chars.location(),
|
||||
})?;
|
||||
let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError {
|
||||
message: format!("Invalid hex digit in escaped unicode string: {}", next_char),
|
||||
message: format!("Invalid hex digit in escaped unicode string: {next_char}"),
|
||||
location: chars.location(),
|
||||
})?;
|
||||
result = result * 16 + digit;
|
||||
}
|
||||
char::from_u32(result).ok_or_else(|| TokenizerError {
|
||||
message: format!("Invalid unicode character: {:x}", result),
|
||||
message: format!("Invalid unicode character: {result:x}"),
|
||||
location: chars.location(),
|
||||
})
|
||||
}
|
||||
|
@ -3504,7 +3504,7 @@ mod tests {
|
|||
}
|
||||
|
||||
fn check_unescape(s: &str, expected: Option<&str>) {
|
||||
let s = format!("'{}'", s);
|
||||
let s = format!("'{s}'");
|
||||
let mut state = State {
|
||||
peekable: s.chars().peekable(),
|
||||
line: 0,
|
||||
|
|
|
@ -1,3 +1,20 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
|
||||
|
|
|
@@ -261,10 +261,10 @@ fn parse_at_at_identifier() {

#[test]
fn parse_begin() {
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END"#;
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; RAISE USING MESSAGE = FORMAT('ERR: %s', 'Bad'); END"#;
let Statement::StartTransaction {
statements,
exception_statements,
exception,
has_end_keyword,
..
} = bigquery().verified_stmt(sql)
@@ -272,7 +272,10 @@ fn parse_begin() {
unreachable!();
};
assert_eq!(1, statements.len());
assert_eq!(1, exception_statements.unwrap().len());
assert!(exception.is_some());

let exception = exception.unwrap();
assert_eq!(1, exception.len());
assert!(has_end_keyword);

bigquery().verified_stmt(
@ -352,14 +355,16 @@ fn parse_create_view_with_options() {
|
|||
ViewColumnDef {
|
||||
name: Ident::new("age"),
|
||||
data_type: None,
|
||||
options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue {
|
||||
key: Ident::new("description"),
|
||||
value: Expr::Value(
|
||||
Value::DoubleQuotedString("field age".to_string()).with_span(
|
||||
Span::new(Location::new(1, 42), Location::new(1, 52))
|
||||
)
|
||||
),
|
||||
}])]),
|
||||
options: Some(ColumnOptions::CommaSeparated(vec![ColumnOption::Options(
|
||||
vec![SqlOption::KeyValue {
|
||||
key: Ident::new("description"),
|
||||
value: Expr::Value(
|
||||
Value::DoubleQuotedString("field age".to_string()).with_span(
|
||||
Span::new(Location::new(1, 42), Location::new(1, 52))
|
||||
)
|
||||
),
|
||||
}]
|
||||
)])),
|
||||
},
|
||||
],
|
||||
columns
|
||||
|
@ -536,8 +541,8 @@ fn parse_create_table_with_options() {
|
|||
(
|
||||
Some(Box::new(Expr::Identifier(Ident::new("_PARTITIONDATE")))),
|
||||
Some(WrappedCollection::NoWrapping(vec![
|
||||
Ident::new("userid"),
|
||||
Ident::new("age"),
|
||||
Expr::Identifier(Ident::new("userid")),
|
||||
Expr::Identifier(Ident::new("age")),
|
||||
])),
|
||||
CreateTableOptions::Options(vec![
|
||||
SqlOption::KeyValue {
|
||||
|
@ -601,11 +606,13 @@ fn parse_nested_data_types() {
|
|||
field_name: Some("a".into()),
|
||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(
|
||||
Box::new(DataType::Int64,)
|
||||
))
|
||||
)),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("b".into()),
|
||||
field_type: DataType::Bytes(Some(42))
|
||||
field_type: DataType::Bytes(Some(42)),
|
||||
options: None,
|
||||
},
|
||||
],
|
||||
StructBracketKind::AngleBrackets
|
||||
|
@ -619,6 +626,7 @@ fn parse_nested_data_types() {
|
|||
vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}],
|
||||
StructBracketKind::AngleBrackets
|
||||
),
|
||||
|
@ -632,35 +640,6 @@ fn parse_nested_data_types() {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_invalid_brackets() {
|
||||
let sql = "SELECT STRUCT<INT64>>(NULL)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
|
||||
);
|
||||
|
||||
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError("Expected: (, found: >".to_string())
|
||||
);
|
||||
|
||||
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError(
|
||||
"Expected: ',' or ')' after column definition, found: >".to_string()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_tuple_struct_literal() {
|
||||
// tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax
|
||||
|
@ -771,6 +750,7 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -799,7 +779,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
quote_style: None,
|
||||
span: Span::empty(),
|
||||
}),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some(Ident {
|
||||
|
@ -807,7 +788,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
quote_style: None,
|
||||
span: Span::empty(),
|
||||
}),
|
||||
field_type: DataType::String(None)
|
||||
field_type: DataType::String(None),
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -825,17 +807,20 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
field_name: Some("arr".into()),
|
||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||
DataType::Float64
|
||||
)))
|
||||
))),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("str".into()),
|
||||
field_type: DataType::Struct(
|
||||
vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Bool
|
||||
field_type: DataType::Bool,
|
||||
options: None,
|
||||
}],
|
||||
StructBracketKind::AngleBrackets
|
||||
)
|
||||
),
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -858,13 +843,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
field_type: DataType::Struct(
|
||||
Default::default(),
|
||||
StructBracketKind::AngleBrackets
|
||||
)
|
||||
),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("y".into()),
|
||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
||||
)))
|
||||
))),
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -879,7 +866,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Bool
|
||||
field_type: DataType::Bool,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -891,7 +879,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Bytes(Some(42))
|
||||
field_type: DataType::Bytes(Some(42)),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -907,7 +896,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Date
|
||||
field_type: DataType::Date,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -920,7 +910,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Datetime(None)
|
||||
field_type: DataType::Datetime(None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -930,7 +921,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
values: vec![Expr::value(number("5.0"))],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Float64
|
||||
field_type: DataType::Float64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[2])
|
||||
|
@ -940,7 +932,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
values: vec![Expr::value(number("1"))],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[3])
|
||||
|
@ -962,7 +955,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
})],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Interval
|
||||
field_type: DataType::Interval,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -977,7 +971,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::JSON
|
||||
field_type: DataType::JSON,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -993,7 +988,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::String(Some(42))
|
||||
field_type: DataType::String(Some(42)),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1006,7 +1002,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Timestamp(None, TimezoneInfo::None)
|
||||
field_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1020,7 +1017,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Time(None, TimezoneInfo::None)
|
||||
field_type: DataType::Time(None, TimezoneInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[2])
|
||||
|
@ -1037,7 +1035,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Numeric(ExactNumberInfo::None)
|
||||
field_type: DataType::Numeric(ExactNumberInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1050,7 +1049,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::BigNumeric(ExactNumberInfo::None)
|
||||
field_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1067,10 +1067,12 @@ fn parse_typed_struct_syntax_bigquery() {
|
|||
StructField {
|
||||
field_name: Some("key".into()),
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("value".into()),
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -1092,6 +1094,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1120,7 +1123,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
quote_style: None,
|
||||
span: Span::empty(),
|
||||
}),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some(Ident {
|
||||
|
@ -1128,7 +1132,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
quote_style: None,
|
||||
span: Span::empty(),
|
||||
}),
|
||||
field_type: DataType::String(None)
|
||||
field_type: DataType::String(None),
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -1151,13 +1156,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
field_type: DataType::Struct(
|
||||
Default::default(),
|
||||
StructBracketKind::AngleBrackets
|
||||
)
|
||||
),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("y".into()),
|
||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
||||
)))
|
||||
))),
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
},
|
||||
|
@ -1172,7 +1179,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Bool
|
||||
field_type: DataType::Bool,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1184,7 +1192,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Bytes(Some(42))
|
||||
field_type: DataType::Bytes(Some(42)),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1200,7 +1209,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Date
|
||||
field_type: DataType::Date,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1213,7 +1223,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Datetime(None)
|
||||
field_type: DataType::Datetime(None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1223,7 +1234,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
values: vec![Expr::value(number("5.0"))],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Float64
|
||||
field_type: DataType::Float64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[2])
|
||||
|
@ -1233,7 +1245,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
values: vec![Expr::value(number("1"))],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[3])
|
||||
|
@ -1255,7 +1268,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
})],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Interval
|
||||
field_type: DataType::Interval,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1270,7 +1284,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::JSON
|
||||
field_type: DataType::JSON,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1286,7 +1301,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::String(Some(42))
|
||||
field_type: DataType::String(Some(42)),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1299,7 +1315,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Timestamp(None, TimezoneInfo::None)
|
||||
field_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1313,7 +1330,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Time(None, TimezoneInfo::None)
|
||||
field_type: DataType::Time(None, TimezoneInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[2])
|
||||
|
@ -1330,7 +1348,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Numeric(ExactNumberInfo::None)
|
||||
field_type: DataType::Numeric(ExactNumberInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1343,7 +1362,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
|||
}],
|
||||
fields: vec![StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::BigNumeric(ExactNumberInfo::None)
|
||||
field_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1360,7 +1380,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
|||
values: vec![Expr::value(number("5"))],
|
||||
fields: vec![StructField {
|
||||
field_name: Some(Ident::from("x")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1372,7 +1393,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: Some(Ident::from("y")),
|
||||
field_type: DataType::String(None)
|
||||
field_type: DataType::String(None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1387,11 +1409,13 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
|||
fields: vec![
|
||||
StructField {
|
||||
field_name: Some(Ident::from("x")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some(Ident::from("y")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}
|
||||
]
|
||||
},
|
||||
|
@ -1409,7 +1433,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
|
|||
values: vec![Expr::value(number("5"))],
|
||||
fields: vec![StructField {
|
||||
field_name: Some(Ident::from("x")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[0])
|
||||
|
@ -1421,7 +1446,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
|
|||
)],
|
||||
fields: vec![StructField {
|
||||
field_name: Some(Ident::from("y")),
|
||||
field_type: DataType::String(None)
|
||||
field_type: DataType::String(None),
|
||||
options: None,
|
||||
}]
|
||||
},
|
||||
expr_from_projection(&select.projection[1])
|
||||
|
@ -1436,11 +1462,13 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
|
|||
fields: vec![
|
||||
StructField {
|
||||
field_name: Some(Ident::from("x")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some(Ident::from("y")),
|
||||
field_type: DataType::Int64
|
||||
field_type: DataType::Int64,
|
||||
options: None,
|
||||
}
|
||||
]
|
||||
},
|
||||
|
@ -2313,16 +2341,46 @@ fn bigquery_select_expr_star() {
|
|||
|
||||
#[test]
|
||||
fn test_select_as_struct() {
|
||||
bigquery().verified_only_select("SELECT * FROM (SELECT AS VALUE STRUCT(123 AS a, false AS b))");
|
||||
for (sql, parse_to) in [
|
||||
(
|
||||
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
"SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM (SELECT ALL AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
|
||||
),
|
||||
] {
|
||||
bigquery().one_statement_parses_to(sql, parse_to);
|
||||
}
|
||||
|
||||
let select = bigquery().verified_only_select("SELECT AS STRUCT 1 AS a, 2 AS b");
|
||||
assert_eq!(Some(ValueTableMode::AsStruct), select.value_table_mode);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_select_as_value() {
|
||||
bigquery().verified_only_select(
|
||||
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
);
|
||||
for (sql, parse_to) in [
|
||||
(
|
||||
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
"SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM (SELECT ALL AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
|
||||
),
|
||||
] {
|
||||
bigquery().one_statement_parses_to(sql, parse_to);
|
||||
}
|
||||
|
||||
let select = bigquery().verified_only_select("SELECT AS VALUE STRUCT(1 AS a, 2 AS b) AS xyz");
|
||||
assert_eq!(Some(ValueTableMode::AsValue), select.value_table_mode);
|
||||
}
|
||||
|
@ -2377,3 +2435,91 @@ fn test_any_type() {
|
|||
fn test_any_type_dont_break_custom_type() {
|
||||
bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_struct_field_options() {
|
||||
bigquery().verified_stmt(concat!(
|
||||
"CREATE TABLE my_table (",
|
||||
"f0 STRUCT<a STRING, b INT64>, ",
|
||||
"f1 STRUCT<",
|
||||
"a STRING OPTIONS(description = 'This is a string', type = 'string'), ",
|
||||
"b INT64",
|
||||
"> OPTIONS(description = 'This is a struct field')",
|
||||
")",
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_struct_trailing_and_nested_bracket() {
|
||||
bigquery().verified_stmt(concat!(
|
||||
"CREATE TABLE my_table (",
|
||||
"f0 STRING, ",
|
||||
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRING>>, ",
|
||||
"f2 STRING",
|
||||
")",
|
||||
));
|
||||
|
||||
// More complex nested structs
|
||||
bigquery().verified_stmt(concat!(
|
||||
"CREATE TABLE my_table (",
|
||||
"f0 STRING, ",
|
||||
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRUCT<e STRING>>>, ",
|
||||
"f2 STRUCT<h STRING, i STRUCT<j INT64, k STRUCT<l STRUCT<m STRING>>>>, ",
|
||||
"f3 STRUCT<e STRING, f STRUCT<c INT64>>",
|
||||
")",
|
||||
));
|
||||
|
||||
// Bad case with missing closing bracket
|
||||
assert_eq!(
|
||||
ParserError::ParserError("Expected: >, found: )".to_owned()),
|
||||
bigquery()
|
||||
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64)")
|
||||
.unwrap_err()
|
||||
);
|
||||
|
||||
// Bad case with redundant closing bracket
|
||||
assert_eq!(
|
||||
ParserError::ParserError(
|
||||
"unmatched > after parsing data type STRUCT<a STRING, b INT64>)".to_owned()
|
||||
),
|
||||
bigquery()
|
||||
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64>>)")
|
||||
.unwrap_err()
|
||||
);
|
||||
|
||||
// Base case with redundant closing bracket in nested struct
|
||||
assert_eq!(
|
||||
ParserError::ParserError(
|
||||
"Expected: ',' or ')' after column definition, found: >".to_owned()
|
||||
),
|
||||
bigquery()
|
||||
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRUCT<b INT>>>, c INT64)")
|
||||
.unwrap_err()
|
||||
);
|
||||
|
||||
let sql = "SELECT STRUCT<INT64>>(NULL)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
|
||||
);
|
||||
|
||||
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError("Expected: (, found: >".to_string())
|
||||
);
|
||||
|
||||
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
|
||||
assert_eq!(
|
||||
bigquery_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError::ParserError(
|
||||
"Expected: ',' or ')' after column definition, found: >".to_string()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ use test_utils::*;
|
|||
use sqlparser::ast::Expr::{BinaryOp, Identifier};
|
||||
use sqlparser::ast::SelectItem::UnnamedExpr;
|
||||
use sqlparser::ast::TableFactor::Table;
|
||||
use sqlparser::ast::Value::Number;
|
||||
use sqlparser::ast::Value::Boolean;
|
||||
use sqlparser::ast::*;
|
||||
use sqlparser::dialect::ClickHouseDialect;
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
|
@ -669,11 +669,13 @@ fn parse_create_table_with_nested_data_types() {
|
|||
DataType::Tuple(vec![
|
||||
StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::FixedString(128)
|
||||
field_type: DataType::FixedString(128),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: None,
|
||||
field_type: DataType::Int128
|
||||
field_type: DataType::Int128,
|
||||
options: None,
|
||||
}
|
||||
])
|
||||
))),
|
||||
|
@ -685,12 +687,14 @@ fn parse_create_table_with_nested_data_types() {
|
|||
StructField {
|
||||
field_name: Some("a".into()),
|
||||
field_type: DataType::Datetime64(9, None),
|
||||
options: None,
|
||||
},
|
||||
StructField {
|
||||
field_name: Some("b".into()),
|
||||
field_type: DataType::Array(ArrayElemTypeDef::Parenthesis(
|
||||
Box::new(DataType::Uuid)
|
||||
))
|
||||
)),
|
||||
options: None,
|
||||
},
|
||||
]),
|
||||
options: vec![],
|
||||
|
@ -910,7 +914,7 @@ fn parse_create_view_with_fields_data_types() {
|
|||
}]),
|
||||
vec![]
|
||||
)),
|
||||
options: None
|
||||
options: None,
|
||||
},
|
||||
ViewColumnDef {
|
||||
name: "f".into(),
|
||||
|
@ -922,7 +926,7 @@ fn parse_create_view_with_fields_data_types() {
|
|||
}]),
|
||||
vec![]
|
||||
)),
|
||||
options: None
|
||||
options: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
|
@ -961,38 +965,103 @@ fn parse_limit_by() {
|
|||
|
||||
#[test]
|
||||
fn parse_settings_in_query() {
|
||||
match clickhouse_and_generic()
|
||||
.verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#)
|
||||
{
|
||||
Statement::Query(query) => {
|
||||
assert_eq!(
|
||||
query.settings,
|
||||
Some(vec![
|
||||
Setting {
|
||||
key: Ident::new("max_threads"),
|
||||
value: Number("1".parse().unwrap(), false)
|
||||
},
|
||||
Setting {
|
||||
key: Ident::new("max_block_size"),
|
||||
value: Number("10000".parse().unwrap(), false)
|
||||
},
|
||||
])
|
||||
);
|
||||
fn check_settings(sql: &str, expected: Vec<Setting>) {
|
||||
match clickhouse_and_generic().verified_stmt(sql) {
|
||||
Statement::Query(q) => {
|
||||
assert_eq!(q.settings, Some(expected));
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
for (sql, expected_settings) in [
|
||||
(
|
||||
r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#,
|
||||
vec![
|
||||
Setting {
|
||||
key: Ident::new("max_threads"),
|
||||
value: Expr::value(number("1")),
|
||||
},
|
||||
Setting {
|
||||
key: Ident::new("max_block_size"),
|
||||
value: Expr::value(number("10000")),
|
||||
},
|
||||
],
|
||||
),
|
||||
(
|
||||
r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#,
|
||||
vec![Setting {
|
||||
key: Ident::new("additional_table_filters"),
|
||||
value: Expr::Dictionary(vec![DictionaryField {
|
||||
key: Ident::with_quote('\'', "table_1"),
|
||||
value: Expr::value(single_quoted_string("x != 2")).into(),
|
||||
}]),
|
||||
}],
|
||||
),
|
||||
(
|
||||
r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#,
|
||||
vec![
|
||||
Setting {
|
||||
key: Ident::new("additional_result_filter"),
|
||||
value: Expr::value(single_quoted_string("x != 2")),
|
||||
},
|
||||
Setting {
|
||||
key: Ident::new("query_plan_optimize_lazy_materialization"),
|
||||
value: Expr::value(Boolean(false)),
|
||||
},
|
||||
],
|
||||
),
|
||||
] {
|
||||
check_settings(sql, expected_settings);
|
||||
}
|
||||
|
||||
let invalid_cases = vec![
|
||||
"SELECT * FROM t SETTINGS a",
|
||||
"SELECT * FROM t SETTINGS a=",
|
||||
"SELECT * FROM t SETTINGS a=1, b",
|
||||
"SELECT * FROM t SETTINGS a=1, b=",
|
||||
"SELECT * FROM t SETTINGS a=1, b=c",
|
||||
("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a=",
|
||||
"Expected: an expression, found: EOF",
|
||||
),
|
||||
("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a=1, b=",
|
||||
"Expected: an expression, found: EOF",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {",
|
||||
"Expected: identifier, found: EOF",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {'b'",
|
||||
"Expected: :, found: EOF",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {'b': ",
|
||||
"Expected: an expression, found: EOF",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {'b': 'c',}",
|
||||
"Expected: identifier, found: }",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}",
|
||||
"Expected: :, found: }",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }",
|
||||
"Expected: an expression, found: }",
|
||||
),
|
||||
(
|
||||
"SELECT * FROM t SETTINGS a = {ANY(b)}",
|
||||
"Expected: :, found: (",
|
||||
),
|
||||
];
|
||||
for sql in invalid_cases {
|
||||
clickhouse_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.expect_err("Expected: SETTINGS key = value, found: ");
|
||||
for (sql, error_msg) in invalid_cases {
|
||||
assert_eq!(
|
||||
clickhouse_and_generic()
|
||||
.parse_sql_statements(sql)
|
||||
.unwrap_err(),
|
||||
ParserError(error_msg.to_string())
|
||||
);
|
||||
}
|
||||
}
|
||||
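
// A minimal sketch for the extended SETTINGS values exercised above: the value
// is now a full expression, including dictionary literals. The statement is
// taken from this test; ClickHouseDialect is the dialect the test targets.
fn settings_dictionary_sketch() {
    use sqlparser::dialect::ClickHouseDialect;
    use sqlparser::parser::Parser;

    let sql = "SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}";
    // The dictionary value parses as a single SELECT statement.
    assert!(Parser::parse_sql(&ClickHouseDialect {}, sql).is_ok());
}
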
#[test]
|
||||
|
@ -1341,7 +1410,7 @@ fn parse_use() {
|
|||
for object_name in &valid_object_names {
|
||||
// Test single identifier without quotes
|
||||
assert_eq!(
|
||||
clickhouse().verified_stmt(&format!("USE {}", object_name)),
|
||||
clickhouse().verified_stmt(&format!("USE {object_name}")),
|
||||
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
|
||||
object_name.to_string()
|
||||
)])))
|
||||
|
@ -1349,7 +1418,7 @@ fn parse_use() {
|
|||
for "e in "e_styles {
|
||||
// Test single identifier with different type of quotes
|
||||
assert_eq!(
|
||||
clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
|
||||
clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
|
||||
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
|
||||
quote,
|
||||
object_name.to_string(),
|
||||
|
@ -1363,7 +1432,7 @@ fn parse_use() {
|
|||
fn test_query_with_format_clause() {
|
||||
let format_options = vec!["TabSeparated", "JSONCompact", "NULL"];
|
||||
for format in &format_options {
|
||||
let sql = format!("SELECT * FROM t FORMAT {}", format);
|
||||
let sql = format!("SELECT * FROM t FORMAT {format}");
|
||||
match clickhouse_and_generic().verified_stmt(&sql) {
|
||||
Statement::Query(query) => {
|
||||
if *format == "NULL" {
|
||||
|
@ -1546,11 +1615,11 @@ fn parse_select_table_function_settings() {
|
|||
settings: Some(vec![
|
||||
Setting {
|
||||
key: "s0".into(),
|
||||
value: Value::Number("3".parse().unwrap(), false),
|
||||
value: Expr::value(number("3")),
|
||||
},
|
||||
Setting {
|
||||
key: "s1".into(),
|
||||
value: Value::SingleQuotedString("s".into()),
|
||||
value: Expr::value(single_quoted_string("s")),
|
||||
},
|
||||
]),
|
||||
},
|
||||
|
@ -1571,11 +1640,11 @@ fn parse_select_table_function_settings() {
|
|||
settings: Some(vec![
|
||||
Setting {
|
||||
key: "s0".into(),
|
||||
value: Value::Number("3".parse().unwrap(), false),
|
||||
value: Expr::value(number("3")),
|
||||
},
|
||||
Setting {
|
||||
key: "s1".into(),
|
||||
value: Value::SingleQuotedString("s".into()),
|
||||
value: Expr::value(single_quoted_string("s")),
|
||||
},
|
||||
]),
|
||||
},
|
||||
|
@ -1585,7 +1654,6 @@ fn parse_select_table_function_settings() {
|
|||
"SELECT * FROM t(SETTINGS a=)",
|
||||
"SELECT * FROM t(SETTINGS a=1, b)",
|
||||
"SELECT * FROM t(SETTINGS a=1, b=)",
|
||||
"SELECT * FROM t(SETTINGS a=1, b=c)",
|
||||
];
|
||||
for sql in invalid_cases {
|
||||
clickhouse_and_generic()
|
||||
|
|
|
@ -2225,7 +2225,7 @@ fn parse_in_subquery() {
|
|||
assert_eq!(
|
||||
Expr::InSubquery {
|
||||
expr: Box::new(Expr::Identifier(Ident::new("segment"))),
|
||||
subquery: verified_query("SELECT segm FROM bar").body,
|
||||
subquery: Box::new(verified_query("SELECT segm FROM bar")),
|
||||
negated: false,
|
||||
},
|
||||
select.selection.unwrap()
|
||||
|
@ -2239,7 +2239,9 @@ fn parse_in_union() {
|
|||
assert_eq!(
|
||||
Expr::InSubquery {
|
||||
expr: Box::new(Expr::Identifier(Ident::new("segment"))),
|
||||
subquery: verified_query("(SELECT segm FROM bar) UNION (SELECT segm FROM bar2)").body,
|
||||
subquery: Box::new(verified_query(
|
||||
"(SELECT segm FROM bar) UNION (SELECT segm FROM bar2)"
|
||||
)),
|
||||
negated: false,
|
||||
},
|
||||
select.selection.unwrap()
|
||||
|
@ -3561,7 +3563,7 @@ fn test_double_value() {
|
|||
for (input, expected) in test_cases {
|
||||
for (i, expr) in input.iter().enumerate() {
|
||||
if let Statement::Query(query) =
|
||||
dialects.one_statement_parses_to(&format!("SELECT {}", expr), "")
|
||||
dialects.one_statement_parses_to(&format!("SELECT {expr}"), "")
|
||||
{
|
||||
if let SetExpr::Select(select) = *query.body {
|
||||
assert_eq!(expected[i], select.projection[0]);
|
||||
|
@ -3791,6 +3793,7 @@ fn parse_create_table() {
|
|||
vec![
|
||||
TableConstraint::ForeignKey {
|
||||
name: Some("fkey".into()),
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable3".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3800,6 +3803,7 @@ fn parse_create_table() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: Some("fkey2".into()),
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3809,6 +3813,7 @@ fn parse_create_table() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: None,
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3818,6 +3823,7 @@ fn parse_create_table() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: None,
|
||||
index_name: None,
|
||||
columns: vec!["lng".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["longitude".into()],
|
||||
|
@ -3914,6 +3920,7 @@ fn parse_create_table_with_constraint_characteristics() {
|
|||
vec![
|
||||
TableConstraint::ForeignKey {
|
||||
name: Some("fkey".into()),
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable3".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3927,6 +3934,7 @@ fn parse_create_table_with_constraint_characteristics() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: Some("fkey2".into()),
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3940,6 +3948,7 @@ fn parse_create_table_with_constraint_characteristics() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: None,
|
||||
index_name: None,
|
||||
columns: vec!["lat".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["lat".into()],
|
||||
|
@ -3953,6 +3962,7 @@ fn parse_create_table_with_constraint_characteristics() {
|
|||
},
|
||||
TableConstraint::ForeignKey {
|
||||
name: None,
|
||||
index_name: None,
|
||||
columns: vec!["lng".into()],
|
||||
foreign_table: ObjectName::from(vec!["othertable4".into()]),
|
||||
referred_columns: vec!["longitude".into()],
|
||||
|
@ -4013,13 +4023,13 @@ fn parse_create_table_column_constraint_characteristics() {
|
|||
syntax
|
||||
};
|
||||
|
||||
let sql = format!("CREATE TABLE t (a int UNIQUE {})", syntax);
|
||||
let sql = format!("CREATE TABLE t (a int UNIQUE {syntax})");
|
||||
let expected_clause = if syntax.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!(" {syntax}")
|
||||
};
|
||||
let expected = format!("CREATE TABLE t (a INT UNIQUE{})", expected_clause);
|
||||
let expected = format!("CREATE TABLE t (a INT UNIQUE{expected_clause})");
|
||||
let ast = one_statement_parses_to(&sql, &expected);
|
||||
|
||||
let expected_value = if deferrable.is_some() || initially.is_some() || enforced.is_some() {
|
||||
|
@ -4260,6 +4270,9 @@ fn parse_create_schema() {
|
|||
verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS(key1 = 'value1')"#);
|
||||
verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS()"#);
|
||||
verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a DEFAULT COLLATE 'und:ci' OPTIONS()"#);
|
||||
verified_stmt(r#"CREATE SCHEMA a.b.c WITH (key1 = 'value1', key2 = 'value2')"#);
|
||||
verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a WITH (key1 = 'value1')"#);
|
||||
verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a WITH ()"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -6861,6 +6874,8 @@ fn parse_searched_case_expr() {
|
|||
let select = verified_only_select(sql);
|
||||
assert_eq!(
|
||||
&Case {
|
||||
case_token: AttachedToken::empty(),
|
||||
end_token: AttachedToken::empty(),
|
||||
operand: None,
|
||||
conditions: vec![
|
||||
CaseWhen {
|
||||
|
@ -6900,6 +6915,8 @@ fn parse_simple_case_expr() {
|
|||
use self::Expr::{Case, Identifier};
|
||||
assert_eq!(
|
||||
&Case {
|
||||
case_token: AttachedToken::empty(),
|
||||
end_token: AttachedToken::empty(),
|
||||
operand: Some(Box::new(Identifier(Ident::new("foo")))),
|
||||
conditions: vec![CaseWhen {
|
||||
condition: Expr::value(number("1")),
|
||||
|
@ -7482,7 +7499,7 @@ fn parse_cte_in_data_modification_statements() {
|
|||
assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 1)");
|
||||
assert!(matches!(*query.body, SetExpr::Update(_)));
|
||||
}
|
||||
other => panic!("Expected: UPDATE, got: {:?}", other),
|
||||
other => panic!("Expected: UPDATE, got: {other:?}"),
|
||||
}
|
||||
|
||||
match verified_stmt("WITH t (x) AS (SELECT 9) DELETE FROM q WHERE id IN (SELECT x FROM t)") {
|
||||
|
@ -7490,7 +7507,7 @@ fn parse_cte_in_data_modification_statements() {
|
|||
assert_eq!(query.with.unwrap().to_string(), "WITH t (x) AS (SELECT 9)");
|
||||
assert!(matches!(*query.body, SetExpr::Delete(_)));
|
||||
}
|
||||
other => panic!("Expected: DELETE, got: {:?}", other),
|
||||
other => panic!("Expected: DELETE, got: {other:?}"),
|
||||
}
|
||||
|
||||
match verified_stmt("WITH x AS (SELECT 42) INSERT INTO t SELECT foo FROM x") {
|
||||
|
@ -7498,7 +7515,7 @@ fn parse_cte_in_data_modification_statements() {
|
|||
assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 42)");
|
||||
assert!(matches!(*query.body, SetExpr::Insert(_)));
|
||||
}
|
||||
other => panic!("Expected: INSERT, got: {:?}", other),
|
||||
other => panic!("Expected: INSERT, got: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7973,7 +7990,7 @@ fn parse_create_view_with_columns() {
|
|||
.map(|name| ViewColumnDef {
|
||||
name,
|
||||
data_type: None,
|
||||
options: None
|
||||
options: None,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
@ -8577,8 +8594,11 @@ fn lateral_function() {
|
|||
#[test]
|
||||
fn parse_start_transaction() {
|
||||
let dialects = all_dialects_except(|d|
|
||||
// BigQuery does not support this syntax
|
||||
d.is::<BigQueryDialect>());
|
||||
// BigQuery and Snowflake does not support this syntax
|
||||
//
|
||||
// BigQuery: <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#begin_transaction>
|
||||
// Snowflake: <https://docs.snowflake.com/en/sql-reference/sql/begin>
|
||||
d.is::<BigQueryDialect>() || d.is::<SnowflakeDialect>());
|
||||
match dialects
|
||||
.verified_stmt("START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE")
|
||||
{
|
||||
|
@ -9366,9 +9386,11 @@ fn parse_grant() {
|
|||
verified_stmt("GRANT SELECT ON VIEW view1 TO ROLE role1");
|
||||
verified_stmt("GRANT EXEC ON my_sp TO runner");
|
||||
verified_stmt("GRANT UPDATE ON my_table TO updater_role AS dbo");
|
||||
|
||||
all_dialects_where(|d| d.identifier_quote_style("none") == Some('['))
|
||||
.verified_stmt("GRANT SELECT ON [my_table] TO [public]");
|
||||
verified_stmt("GRANT SELECT ON FUTURE SCHEMAS IN DATABASE db1 TO ROLE role1");
|
||||
verified_stmt("GRANT SELECT ON FUTURE TABLES IN SCHEMA db1.sc1 TO ROLE role1");
|
||||
verified_stmt("GRANT SELECT ON FUTURE VIEWS IN SCHEMA db1.sc1 TO ROLE role1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -10023,7 +10045,7 @@ fn parse_offset_and_limit() {
|
|||
#[test]
|
||||
fn parse_time_functions() {
|
||||
fn test_time_function(func_name: &'static str) {
|
||||
let sql = format!("SELECT {}()", func_name);
|
||||
let sql = format!("SELECT {func_name}()");
|
||||
let select = verified_only_select(&sql);
|
||||
let select_localtime_func_call_ast = Function {
|
||||
name: ObjectName::from(vec![Ident::new(func_name)]),
|
||||
|
@ -10045,7 +10067,7 @@ fn parse_time_functions() {
|
|||
);
|
||||
|
||||
// Validating Parenthesis
|
||||
let sql_without_parens = format!("SELECT {}", func_name);
|
||||
let sql_without_parens = format!("SELECT {func_name}");
|
||||
let mut ast_without_parens = select_localtime_func_call_ast;
|
||||
ast_without_parens.args = FunctionArguments::None;
|
||||
assert_eq!(
|
||||
|
@ -14286,7 +14308,7 @@ fn overflow() {
|
|||
let expr = std::iter::repeat_n("1", 1000)
|
||||
.collect::<Vec<_>>()
|
||||
.join(" + ");
|
||||
let sql = format!("SELECT {}", expr);
|
||||
let sql = format!("SELECT {expr}");
|
||||
|
||||
let mut statements = Parser::parse_sql(&GenericDialect {}, sql.as_str()).unwrap();
|
||||
let statement = statements.pop().unwrap();
|
||||
|
@ -14586,7 +14608,7 @@ fn test_conditional_statement_span() {
|
|||
else_block.unwrap().span()
|
||||
);
|
||||
}
|
||||
stmt => panic!("Unexpected statement: {:?}", stmt),
|
||||
stmt => panic!("Unexpected statement: {stmt:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -14642,6 +14664,8 @@ fn test_lambdas() {
|
|||
Expr::Lambda(LambdaFunction {
|
||||
params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]),
|
||||
body: Box::new(Expr::Case {
|
||||
case_token: AttachedToken::empty(),
|
||||
end_token: AttachedToken::empty(),
|
||||
operand: None,
|
||||
conditions: vec![
|
||||
CaseWhen {
|
||||
|
@ -15699,6 +15723,11 @@ fn parse_return() {
|
|||
let _ = all_dialects().verified_stmt("RETURN 1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_subquery_limit() {
|
||||
let _ = all_dialects().verified_stmt("SELECT t1_id, t1_name FROM t1 WHERE t1_id IN (SELECT t2_id FROM t2 WHERE t1_name = t2_name LIMIT 10)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_open() {
|
||||
let open_cursor = "OPEN Employee_Cursor";
|
||||
|
@ -15710,3 +15739,153 @@ fn test_open() {
|
|||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_truncate_only() {
|
||||
let truncate = all_dialects().verified_stmt("TRUNCATE TABLE employee, ONLY dept");
|
||||
|
||||
let table_names = vec![
|
||||
TruncateTableTarget {
|
||||
name: ObjectName::from(vec![Ident::new("employee")]),
|
||||
only: false,
|
||||
},
|
||||
TruncateTableTarget {
|
||||
name: ObjectName::from(vec![Ident::new("dept")]),
|
||||
only: true,
|
||||
},
|
||||
];
|
||||
|
||||
assert_eq!(
|
||||
Statement::Truncate {
|
||||
table_names,
|
||||
partitions: None,
|
||||
table: true,
|
||||
identity: None,
|
||||
cascade: None,
|
||||
on_cluster: None,
|
||||
},
|
||||
truncate
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_enforced() {
|
||||
all_dialects().verified_stmt(
|
||||
"CREATE TABLE t (a INT, b INT, c INT, CHECK (a > 0) NOT ENFORCED, CHECK (b > 0) ENFORCED, CHECK (c > 0))",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn join_precedence() {
|
||||
all_dialects_except(|d| !d.supports_left_associative_joins_without_parens())
|
||||
.verified_query_with_canonical(
|
||||
"SELECT *
|
||||
FROM t1
|
||||
NATURAL JOIN t5
|
||||
INNER JOIN t0 ON (t0.v1 + t5.v0) > 0
|
||||
WHERE t0.v1 = t1.v0",
|
||||
// canonical string without parentheses
|
||||
"SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0 WHERE t0.v1 = t1.v0",
|
||||
);
|
||||
all_dialects_except(|d| d.supports_left_associative_joins_without_parens()).verified_query_with_canonical(
|
||||
"SELECT *
|
||||
FROM t1
|
||||
NATURAL JOIN t5
|
||||
INNER JOIN t0 ON (t0.v1 + t5.v0) > 0
|
||||
WHERE t0.v1 = t1.v0",
|
||||
// canonical string with parentheses
|
||||
"SELECT * FROM t1 NATURAL JOIN (t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0) WHERE t0.v1 = t1.v0",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_create_procedure_with_language() {
|
||||
let sql = r#"CREATE PROCEDURE test_proc LANGUAGE sql AS BEGIN SELECT 1; END"#;
|
||||
match verified_stmt(sql) {
|
||||
Statement::CreateProcedure {
|
||||
or_alter,
|
||||
name,
|
||||
params,
|
||||
language,
|
||||
..
|
||||
} => {
|
||||
assert_eq!(or_alter, false);
|
||||
assert_eq!(name.to_string(), "test_proc");
|
||||
assert_eq!(params, Some(vec![]));
|
||||
assert_eq!(
|
||||
language,
|
||||
Some(Ident {
|
||||
value: "sql".into(),
|
||||
quote_style: None,
|
||||
span: Span {
|
||||
start: Location::empty(),
|
||||
end: Location::empty()
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
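
// A minimal sketch for the LANGUAGE clause covered by the test above, reusing
// its statement; GenericDialect is an assumption (the test uses verified_stmt,
// which runs the statement through every dialect).
fn create_procedure_language_sketch() {
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let sql = "CREATE PROCEDURE test_proc LANGUAGE sql AS BEGIN SELECT 1; END";
    // The optional LANGUAGE identifier is accepted before AS.
    assert!(Parser::parse_sql(&GenericDialect {}, sql).is_ok());
}
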
|
||||
#[test]
|
||||
fn parse_create_procedure_with_parameter_modes() {
|
||||
let sql = r#"CREATE PROCEDURE test_proc (IN a INTEGER, OUT b TEXT, INOUT c TIMESTAMP, d BOOL) AS BEGIN SELECT 1; END"#;
|
||||
match verified_stmt(sql) {
|
||||
Statement::CreateProcedure {
|
||||
or_alter,
|
||||
name,
|
||||
params,
|
||||
..
|
||||
} => {
|
||||
assert_eq!(or_alter, false);
|
||||
assert_eq!(name.to_string(), "test_proc");
|
||||
let fake_span = Span {
|
||||
start: Location { line: 0, column: 0 },
|
||||
end: Location { line: 0, column: 0 },
|
||||
};
|
||||
assert_eq!(
|
||||
params,
|
||||
Some(vec![
|
||||
ProcedureParam {
|
||||
name: Ident {
|
||||
value: "a".into(),
|
||||
quote_style: None,
|
||||
span: fake_span,
|
||||
},
|
||||
data_type: DataType::Integer(None),
|
||||
mode: Some(ArgMode::In)
|
||||
},
|
||||
ProcedureParam {
|
||||
name: Ident {
|
||||
value: "b".into(),
|
||||
quote_style: None,
|
||||
span: fake_span,
|
||||
},
|
||||
data_type: DataType::Text,
|
||||
mode: Some(ArgMode::Out)
|
||||
},
|
||||
ProcedureParam {
|
||||
name: Ident {
|
||||
value: "c".into(),
|
||||
quote_style: None,
|
||||
span: fake_span,
|
||||
},
|
||||
data_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||
mode: Some(ArgMode::InOut)
|
||||
},
|
||||
ProcedureParam {
|
||||
name: Ident {
|
||||
value: "d".into(),
|
||||
quote_style: None,
|
||||
span: fake_span,
|
||||
},
|
||||
data_type: DataType::Bool,
|
||||
mode: None
|
||||
},
|
||||
])
|
||||
);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@
// specific language governing permissions and limitations
// under the License.

use sqlparser::ast::helpers::attached_token::AttachedToken;
use sqlparser::ast::*;
use sqlparser::dialect::{DatabricksDialect, GenericDialect};
use sqlparser::parser::ParserError;

@ -108,6 +109,8 @@ fn test_databricks_lambdas() {
Expr::Lambda(LambdaFunction {
params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]),
body: Box::new(Expr::Case {
case_token: AttachedToken::empty(),
end_token: AttachedToken::empty(),
operand: None,
conditions: vec![
CaseWhen {

@ -210,7 +213,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {}", object_name)),
databricks().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -218,7 +221,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -230,21 +233,21 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with keyword and different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)),
databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")),
Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote(
quote,
"my_catalog".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),

@ -354,6 +357,6 @@ fn data_type_timestamp_ntz() {
}]
);
}
s => panic!("Unexpected statement: {:?}", s),
s => panic!("Unexpected statement: {s:?}"),
}
}

@ -44,10 +44,12 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("i")),
field_type: DataType::Integer(None),
options: None,
},
],
StructBracketKind::Parentheses,

@ -84,6 +86,7 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("s")),

@ -92,14 +95,17 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("a1")),
field_type: DataType::Integer(None),
options: None,
},
StructField {
field_name: Some(Ident::new("a2")),
field_type: DataType::Varchar(None),
options: None,
},
],
StructBracketKind::Parentheses,
),
options: None,
},
],
StructBracketKind::Parentheses,

@ -362,7 +368,7 @@ fn test_duckdb_specific_int_types() {
("HUGEINT", DataType::HugeInt),
];
for (dtype_string, data_type) in duckdb_dtypes {
let sql = format!("SELECT 123::{}", dtype_string);
let sql = format!("SELECT 123::{dtype_string}");
let select = duckdb().verified_only_select(&sql);
assert_eq!(
&Expr::Cast {

@ -786,7 +792,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {}", object_name)),
duckdb().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -794,7 +800,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -806,7 +812,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
duckdb().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")

@ -341,6 +341,9 @@ fn lateral_view() {
fn sort_by() {
let sort_by = "SELECT * FROM db.table SORT BY a";
hive().verified_stmt(sort_by);

let sort_by_with_direction = "SELECT * FROM db.table SORT BY a, b DESC";
hive().verified_stmt(sort_by_with_direction);
}

#[test]

@ -521,7 +524,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}", object_name)),
hive().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -529,7 +532,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -153,7 +153,8 @@ fn parse_create_procedure() {
quote_style: None,
span: Span::empty(),
},
data_type: DataType::Int(None)
data_type: DataType::Int(None),
mode: None,
},
ProcedureParam {
name: Ident {

@ -164,14 +165,16 @@ fn parse_create_procedure() {
data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 256,
unit: None
}))
})),
mode: None,
}
]),
name: ObjectName::from(vec![Ident {
value: "test".into(),
quote_style: None,
span: Span::empty(),
}])
}]),
language: None,
}
)
}

@ -1670,7 +1673,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}", object_name)),
ms().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -1678,7 +1681,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -2184,7 +2187,7 @@ fn parse_mssql_if_else() {
"IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"
);
}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}

@ -2234,7 +2237,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 21), Location::new(1, 36))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Blocks

@ -2255,7 +2258,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 32), Location::new(1, 57))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}

@ -593,7 +593,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
mysql_and_generic().verified_stmt(&format!("USE {}", object_name)),
mysql_and_generic().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -601,8 +601,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
mysql_and_generic()
.verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -670,6 +669,20 @@ fn table_constraint_unique_primary_ctor(
characteristics: Option<ConstraintCharacteristics>,
unique_index_type_display: Option<KeyOrIndexDisplay>,
) -> TableConstraint {
let columns = columns
.into_iter()
.map(|ident| IndexColumn {
column: OrderByExpr {
expr: Expr::Identifier(ident),
options: OrderByOptions {
asc: None,
nulls_first: None,
},
with_fill: None,
},
operator_class: None,
})
.collect();
match unique_index_type_display {
Some(index_type_display) => TableConstraint::Unique {
name,

@ -795,6 +808,67 @@ fn parse_create_table_primary_and_unique_key_with_index_options() {
}
}

#[test]
fn parse_prefix_key_part() {
let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value(
number("10"),
)))];
for sql in [
"CREATE INDEX idx_index ON t(textcol(10))",
"ALTER TABLE tab ADD INDEX idx_index (textcol(10))",
"ALTER TABLE tab ADD PRIMARY KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))",
"CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))",
] {
match index_column(mysql_and_generic().verified_stmt(sql)) {
Expr::Function(Function {
name,
args: FunctionArguments::List(FunctionArgumentList { args, .. }),
..
}) => {
assert_eq!(name.to_string(), "textcol");
assert_eq!(args, expected);
}
expr => panic!("unexpected expression {expr} for {sql}"),
}
}
}

#[test]
fn test_functional_key_part() {
assert_eq!(
index_column(
mysql_and_generic()
.verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)")
),
Expr::Nested(Box::new(Expr::Collate {
expr: Box::new(Expr::Identifier("col".into())),
collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier(
Ident::new("utf8mb4_bin")
)]),
}))
);
assert_eq!(
index_column(mysql_and_generic().verified_stmt(
r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"#
)),
Expr::Nested(Box::new(Expr::Cast {
kind: CastKind::Cast,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("col"))),
op: BinaryOperator::LongArrow,
right: Box::new(Expr::Value(
Value::SingleQuotedString("$.id".to_string()).with_empty_span()
)),
}),
data_type: DataType::Unsigned,
format: None,
})),
);
}

#[test]
fn parse_create_table_primary_and_unique_key_with_index_type() {
let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {

@ -2188,11 +2262,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 2: Qualified column name that starts with digits and on its own represents a number.

@ -2202,11 +2276,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 3: Unqualified, the same token is parsed as a number.

@ -2220,11 +2294,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
assert_eq!(&number("15e29"), value);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 4: Quoted simple identifier.

@ -2234,11 +2308,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
assert_eq!(&Ident::with_quote('`', "15e29"), name);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 5: Quoted compound identifier.

@ -2251,11 +2325,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 6: Multi-level compound identifiers.

@ -2272,11 +2346,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}

// Case 7: Multi-level compound quoted identifiers.

@ -2293,11 +2367,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}

@ -3988,6 +4062,13 @@ fn parse_straight_join() {
.verified_stmt("SELECT a.*, b.* FROM table_a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id");
}

#[test]
fn mysql_foreign_key_with_index_name() {
mysql().verified_stmt(
"CREATE TABLE orders (customer_id INT, INDEX idx_customer (customer_id), CONSTRAINT fk_customer FOREIGN KEY idx_customer (customer_id) REFERENCES customers(id))",
);
}

#[test]
fn parse_drop_index() {
let sql = "DROP INDEX idx_name ON table_name";

@ -4018,3 +4099,13 @@ fn parse_drop_index() {
_ => unreachable!(),
}
}

#[test]
fn parse_alter_table_drop_index() {
assert_matches!(
alter_table_op(
mysql_and_generic().verified_stmt("ALTER TABLE tab DROP INDEX idx_index")
),
AlterTableOperation::DropIndex { name } if name.value == "idx_index"
);
}

@ -2535,12 +2535,12 @@ fn parse_create_indices_with_operator_classes() {
for expected_operator_class in &operator_classes {
let single_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);
let multi_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);

@ -3273,7 +3273,7 @@ fn test_fn_arg_with_value_operator() {
assert!(matches!(
&args[..],
&[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }]
), "Invalid function argument: {:?}", args);
), "Invalid function argument: {args:?}");
}
other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"),
}

@ -4788,13 +4788,13 @@ fn parse_truncate() {
let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]);
let table_names = vec![TruncateTableTarget {
name: table_name.clone(),
only: false,
}];
assert_eq!(
Statement::Truncate {
table_names,
partitions: None,
table: false,
only: false,
identity: None,
cascade: None,
on_cluster: None,

@ -4811,6 +4811,7 @@ fn parse_truncate_with_options() {
let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]);
let table_names = vec![TruncateTableTarget {
name: table_name.clone(),
only: true,
}];

assert_eq!(

@ -4818,7 +4819,6 @@ fn parse_truncate_with_options() {
table_names,
partitions: None,
table: true,
only: true,
identity: Some(TruncateIdentityOption::Restart),
cascade: Some(CascadeOption::Cascade),
on_cluster: None,

@ -4839,9 +4839,11 @@ fn parse_truncate_with_table_list() {
let table_names = vec![
TruncateTableTarget {
name: table_name_a.clone(),
only: false,
},
TruncateTableTarget {
name: table_name_b.clone(),
only: false,
},
];

@ -4850,7 +4852,6 @@ fn parse_truncate_with_table_list() {
table_names,
partitions: None,
table: true,
only: false,
identity: Some(TruncateIdentityOption::Restart),
cascade: Some(CascadeOption::Cascade),
on_cluster: None,

@ -5377,6 +5378,7 @@ fn parse_create_domain() {
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});

@ -5395,6 +5397,7 @@ fn parse_create_domain() {
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});

@ -5413,6 +5416,7 @@ fn parse_create_domain() {
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});

@ -5431,6 +5435,7 @@ fn parse_create_domain() {
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});

@ -5449,6 +5454,7 @@ fn parse_create_domain() {
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});

@ -5673,7 +5679,7 @@ fn parse_drop_trigger() {
"DROP TRIGGER{} check_update ON table_name{}",
if if_exists { " IF EXISTS" } else { "" },
option
.map(|o| format!(" {}", o))
.map(|o| format!(" {o}"))
.unwrap_or_else(|| "".to_string())
);
assert_eq!(

@ -5767,8 +5773,7 @@ fn parse_trigger_related_functions() {
// Now we parse the statements and check if they are parsed correctly.
let mut statements = pg()
.parse_sql_statements(&format!(
"{}{}{}{}",
sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger
"{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}"
))
.unwrap();

@ -6195,3 +6200,34 @@ fn parse_alter_table_replica_identity() {
_ => unreachable!(),
}
}

#[test]
fn parse_ts_datatypes() {
match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsVector,
options: vec![],
}]
);
}
_ => unreachable!(),
}

match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsQuery,
options: vec![],
}]
);
}
_ => unreachable!(),
}
}

@ -446,19 +446,56 @@ fn test_snowflake_create_table_if_not_exists() {
}
_ => unreachable!(),
}

for (sql, parse_to) in [
(
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#,
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TRANSIENT TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#,
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
] {
snowflake().one_statement_parses_to(sql, parse_to);
}
}

#[test]
fn test_snowflake_create_table_cluster_by() {
match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b)") {
match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b, my_func(c))") {
Statement::CreateTable(CreateTable {
name, cluster_by, ..
}) => {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(WrappedCollection::Parentheses(vec![
Ident::new("a"),
Ident::new("b"),
Expr::Identifier(Ident::new("a")),
Expr::Identifier(Ident::new("b")),
Expr::Function(Function {
name: ObjectName::from(vec![Ident::new("my_func")]),
uses_odbc_syntax: false,
parameters: FunctionArguments::None,
args: FunctionArguments::List(FunctionArgumentList {
args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(
Expr::Identifier(Ident::new("c"))
))],
duplicate_treatment: None,
clauses: vec![],
}),
filter: None,
null_treatment: None,
over: None,
within_group: vec![],
}),
])),
cluster_by
)

@ -882,8 +919,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(WrappedCollection::Parentheses(vec![
Ident::new("a"),
Ident::new("b"),
Expr::Identifier(Ident::new("a")),
Expr::Identifier(Ident::new("b")),
])),
cluster_by
);

@ -2473,10 +2510,7 @@ fn test_snowflake_stage_object_names_into_location() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)

@ -2499,10 +2533,7 @@ fn test_snowflake_stage_object_names_into_table() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)

@ -2983,7 +3014,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}", object_name)),
snowflake().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))

@ -2991,7 +3022,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),

@ -3003,7 +3034,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")

@ -3022,35 +3055,37 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single and double identifier with keyword and different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Schema(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)),
snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")),
Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote(
quote,
"my_role".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)),
snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")),
Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote(
quote,
"my_wh".to_string(),

@ -3087,7 +3122,7 @@ fn view_comment_option_should_be_after_column_list() {
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t",
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t",
] {
snowflake_and_generic()
snowflake()
.verified_stmt(sql);
}
}

@ -3096,7 +3131,7 @@ fn parse_view_column_descriptions() {
fn parse_view_column_descriptions() {
let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1";

match snowflake_and_generic().verified_stmt(sql) {
match snowflake().verified_stmt(sql) {
Statement::CreateView { name, columns, .. } => {
assert_eq!(name.to_string(), "v");
assert_eq!(

@ -3105,7 +3140,9 @@ fn parse_view_column_descriptions() {
ViewColumnDef {
name: Ident::new("a"),
data_type: None,
options: Some(vec![ColumnOption::Comment("Comment".to_string())]),
options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
"Comment".to_string()
)])),
},
ViewColumnDef {
name: Ident::new("b"),

@ -3590,7 +3627,7 @@ fn test_alter_session_followed_by_statement() {
.unwrap();
match stmts[..] {
[Statement::AlterSession { .. }, Statement::Query { .. }] => {}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}

@ -4045,3 +4082,93 @@ fn parse_connect_by_root_operator() {
"sql parser error: Expected an expression, found: FROM"
);
}

#[test]
fn test_begin_exception_end() {
for sql in [
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END",
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END",
"BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END",
"BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END",
] {
snowflake().verified_stmt(sql);
}

let sql = r#"
DECLARE
EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.');
EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!');
EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...');
BEGIN
BEGIN
SELECT 1;
EXCEPTION
WHEN EXCEPTION_1 THEN
SELECT 1;
WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
SELECT 2;
SELECT 3;
WHEN OTHER THEN
SELECT 4;
RAISE;
END;
END
"#;

// Outer `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction { mut statements, .. } = snowflake()
.parse_sql_statements(sql)
.unwrap()
.pop()
.unwrap()
else {
unreachable!();
};

// Inner `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction {
statements,
exception,
has_end_keyword,
..
} = statements.pop().unwrap()
else {
unreachable!();
};

assert_eq!(1, statements.len());
assert!(has_end_keyword);

let exception = exception.unwrap();
assert_eq!(3, exception.len());
assert_eq!(1, exception[0].idents.len());
assert_eq!(1, exception[0].statements.len());
assert_eq!(2, exception[1].idents.len());
assert_eq!(2, exception[1].statements.len());
}

#[test]
fn test_snowflake_fetch_clause_syntax() {
let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical);

snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical);

snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical);

snowflake().verified_only_select_with_canonical(
"SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS",
canonical,
);
}

#[test]
fn test_snowflake_create_view_with_multiple_column_options() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}

@ -324,7 +324,7 @@ fn parse_create_table_on_conflict_col() {
Keyword::IGNORE,
Keyword::REPLACE,
] {
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword);
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})");
match sqlite_and_generic().verified_stmt(&sql) {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(

@ -410,7 +410,7 @@ fn parse_window_function_with_filter() {
"count",
"user_defined_function",
] {
let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name);
let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t");
let select = sqlite().verified_only_select(&sql);
assert_eq!(select.to_string(), sql);
assert_eq!(

@ -444,7 +444,7 @@ fn parse_window_function_with_filter() {
fn parse_attach_database() {
let sql = "ATTACH DATABASE 'test.db' AS test";
let verified_stmt = sqlite().verified_stmt(sql);
assert_eq!(sql, format!("{}", verified_stmt));
assert_eq!(sql, format!("{verified_stmt}"));
match verified_stmt {
Statement::AttachDatabase {
schema_name,