Compare commits

...

74 commits

Author SHA1 Message Date
Yoav Cohen
93450cc250
Add Snowflake COPY/REVOKE CURRENT GRANTS option (#1926)
2025-07-07 17:13:57 +02:00
Sergey Olontsev
1a33abda63
Clickhouse: support empty parenthesized options (#1925)
2025-07-06 09:06:20 +02:00
Yoav Cohen
f2fba48a7a
Add support for several Snowflake grant statements (#1922) 2025-07-06 08:58:19 +02:00
Simon Vandel Sillesen
cf9e50474e
Make GenericDialect support trailing commas in projections (#1921) 2025-07-06 08:57:20 +02:00
Yoav Cohen
ed8757f2f0
Align Snowflake dialect to new test of reserved keywords (#1924)
2025-07-05 08:40:35 +02:00
Yoav Cohen
d2466af20a
Add support for dropping multiple columns in Snowflake (#1918) 2025-07-05 08:18:58 +02:00
Yoav Cohen
b0bcc46e22
Add support for NULL escape char in pattern match searches (#1913)
2025-07-04 21:04:51 +02:00
Elia Perantoni
942d747d89
Change tag and policy names to ObjectName (#1892) 2025-07-04 18:21:31 +02:00
Sergey Olontsev
239e30a97c
Support for Postgres CREATE SERVER (#1914)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-07-03 19:04:32 +02:00
feral-dot-io
9020385c02
Add span for Expr::TypedString (#1919) 2025-07-03 18:24:51 +02:00
Yoav Cohen
be2d2f14e7
Add support for MySQL MEMBER OF (#1917) 2025-07-03 18:22:17 +02:00
carl
418b94227a
Postgres: support ADD CONSTRAINT NOT VALID and VALIDATE CONSTRAINT (#1908) 2025-07-03 18:19:26 +02:00
Yoav Cohen
015caca611
Redshift alter column type no set (#1912) 2025-07-03 18:16:21 +02:00
Ryan Schneider
a3398223d7
DuckDB: Add support for multiple TRIM arguments (#1916)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-07-02 14:57:08 +02:00
Yoav Cohen
f32a41a004
Redshift utf8 idents (#1915)
2025-07-01 13:52:29 +02:00
Simon Vandel Sillesen
9ffc546870
Make GenericDialect support from-first syntax (#1911) 2025-07-01 13:19:40 +02:00
Simon Vandel Sillesen
abd80f9ecb
Support remaining pipe operators (#1879)
2025-06-30 17:51:55 +02:00
Dima
3bc94234df
Fix join precedence for non-snowflake queries (#1905)
2025-06-28 20:24:25 +02:00
Sergey Olontsev
50c605a471
Support for Map values in ClickHouse settings (#1896)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-06-28 08:13:11 +02:00
Yoav Cohen
6c38cdcadb
Snowflake: Add support for future grants (#1906)
2025-06-27 21:21:38 +02:00
Ifeanyi Ubah
5f2b5fe7be
Fix clippy lints on 1.88.0 (#1910) 2025-06-27 14:21:17 -04:00
ZacJW
95d16e3b2d
Add support for LANGUAGE clause in CREATE PROCEDURE (#1903) 2025-06-27 18:22:21 +02:00
Elia Perantoni
1bbc05cdff
Snowflake: support multiple column options in CREATE VIEW (#1891)
2025-06-25 16:10:01 +02:00
Elia Perantoni
b2ab0061c1
Fix impl Ord for Ident (#1893)
2025-06-25 12:21:59 +02:00
ZacJW
b9365b3853
Support procedure argmode (#1901)
2025-06-24 08:39:02 +02:00
Denys Tsomenko
44f3be38e5
fix: parse snowflake fetch clause (#1894) 2025-06-24 08:29:44 +02:00
Michael Victor Zink
5d63663bc6
Use IndexColumn in all index definitions (#1900) 2025-06-24 08:18:03 +02:00
Dima
7865de015f
Fix limit in subqueries (#1899)
2025-06-22 09:22:45 +02:00
Mohamed Abdeen
1d0dc7cdd8
Postgres: Add support for text search types (#1889) 2025-06-22 09:02:51 +02:00
Simon Sawert
204d3b484d
Extend exception handling (#1884)
2025-06-21 08:12:07 +02:00
hulk
185a490218
Fix parsing error when having fields after nested struct in BigQuery (#1897)
2025-06-20 16:56:26 +02:00
hulk
b1b379e570
Add support of parsing struct field's options in BigQuery (#1890)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-06-18 07:00:53 +02:00
Andrew Lamb
be30697efb
Add license header check to CI (#1888)
2025-06-17 10:46:58 -04:00
Andrew Lamb
6f423969b0
Add license header to display_utils.rs and pretty_print.rs (#1887)
2025-06-16 18:37:18 +02:00
Artem Osipov
e406422bac
Add support for cluster by expressions (#1883)
Co-authored-by: osipovartem <artem@PC.localdomain>
2025-06-16 12:33:16 +02:00
Andrew Lamb
0f2208d293
Prepare for 0.57.0 release (#1885)
2025-06-14 06:45:34 +02:00
Simon Sawert
703ba2cf48
Support DISTINCT AS { STRUCT | VALUE } for BigQuery (#1880)
2025-06-11 18:12:30 +02:00
Simon Sawert
c2e83d49f6
Allow IF NOT EXISTS after table name for Snowflake (#1881) 2025-06-11 18:11:07 +02:00
Jacob Wujciak-Jens
9fc9009b94
chore: Replace archived actions-rs/install action (#1876)
2025-06-10 19:54:21 -04:00
vimko
559b7e36d9
Add support for ALTER TABLE DROP INDEX (#1865)
2025-06-10 18:26:07 +02:00
Yannick Utard
40d12b98bd
Add support for CREATE SCHEMA WITH ( <properties> ) (#1877)
2025-06-10 06:50:10 +02:00
Mohamed Abdeen
84c3a1b325
MySQL: [[NOT] ENFORCED] in CHECK constraint (#1870) 2025-06-07 06:48:40 +02:00
Elia Perantoni
ff29dd25b2
Fix CASE expression spans (#1874) 2025-06-06 16:06:33 +02:00
Chen Chongchen
e2b1ae36e9
feat: Hive: support SORT BY direction (#1873) 2025-06-06 09:11:44 +02:00
Mohamed Abdeen
4cf5e571d3
Postgres: Apply ONLY keyword per table in TRUNCATE stmt (#1872) 2025-06-06 09:10:03 +02:00
Mohamed Abdeen
de2cc7b502
MySQL: Support index_name in FK constraints (#1871) 2025-06-06 09:03:59 +02:00
Artem Osipov
5327f0ce13
Add ICEBERG keyword support to ALTER TABLE statement (#1869) 2025-06-04 19:49:07 +02:00
Denys Tsomenko
394a534486
Fix: GROUPING SETS accept values without parenthesis (#1867) 2025-06-02 18:04:35 +02:00
Dmitriy Mazurin
80d47eee84
Adds support for mysql's drop index (#1864)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-05-30 09:16:36 +02:00
Hendrik Makait
a8bde39efb
Add support for TABLESAMPLE pipe operator (#1860) 2025-05-30 09:14:36 +02:00
Andrew Harper
eacf00d269
Add support for parameter default values in SQL Server (#1866) 2025-05-29 11:49:28 +02:00
hulk
9159d08c5e
Keep the COLUMN keyword only if it exists when dropping the column (#1862) 2025-05-28 07:09:40 +02:00
Andrew Harper
301726541a
Add support for table valued functions for SQL Server (#1839) 2025-05-23 07:19:16 +02:00
Mohamed Abdeen
bf2b72fbe0
Mysql: Add SRID column option (#1852) 2025-05-23 07:09:05 +02:00
Luca Cappelletti
05d7ffb1d5
Handle optional datatypes properly in CREATE FUNCTION statements (#1826)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-05-21 05:49:28 +02:00
Ophir LOJKINE
3f4d5f96ee
pretty-print CREATE VIEW statements (#1855) 2025-05-21 05:44:33 +02:00
Ophir LOJKINE
a496f78803
pretty-print CREATE TABLE statements (#1854) 2025-05-20 12:34:48 -04:00
dependabot[bot]
525ed81fde
Update criterion requirement from 0.5 to 0.6 in /sqlparser_bench (#1857)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: ifeanyi <ifeanyi@validio.io>
2025-05-20 06:21:41 +02:00
Ophir LOJKINE
e7bf186e44
fix new rust 1.87 cargo clippy warnings (#1856) 2025-05-19 13:09:00 +02:00
Ophir LOJKINE
ae587dcbec
pretty print improvements (#1851) 2025-05-15 16:43:16 +02:00
Denys Tsomenko
3c59950060
Add support for INCLUDE/EXCLUDE NULLS for UNPIVOT (#1849) 2025-05-15 16:40:21 +02:00
Mohamed Abdeen
c6e897dc12
Postgresql: Add REPLICA IDENTITY operation for ALTER TABLE (#1844) 2025-05-14 09:40:44 +02:00
Andrew Harper
74a95fdbd1
Add support for DENY statements (#1836) 2025-05-14 09:21:23 +02:00
Ophir LOJKINE
178a351812
Fix big performance issue in string serialization (#1848) 2025-05-13 11:36:27 -04:00
Ophir LOJKINE
6120bb59cc
implement pretty-printing with {:#} (#1847) 2025-05-13 09:25:07 -04:00
Mohamed Abdeen
052ad4a759
Fix: parsing ident starting with underscore in certain dialects (#1835) 2025-05-10 02:14:25 +02:00
Ophir LOJKINE
2182f7ea71
Add support for the MATCH and REGEXP binary operators (#1840) 2025-05-09 01:48:23 +02:00
Andrew Harper
6cd237ea43
Allow stored procedures to be defined without BEGIN/END (#1834) 2025-05-09 01:40:03 +02:00
Luca Cappelletti
ac1c339666
Added support for CREATE DOMAIN (#1830) 2025-05-04 23:21:44 +02:00
Andrew Harper
a497358c3a
Add CREATE TRIGGER support for SQL Server (#1810) 2025-05-03 16:59:13 +02:00
benrsatori
728645fb31
Add all missing table options to be handled in any order (#1747)
Co-authored-by: Tomer Shani <tomer.shani@satoricyber.com>
2025-05-02 15:16:59 +02:00
Andrew Harper
a464f8e8d7
Improve support for cursors for SQL Server (#1831)
Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
2025-05-02 05:25:30 +02:00
Luca Cappelletti
483394cd1a
Added support for DROP DOMAIN (#1828) 2025-05-02 05:16:24 +02:00
Simon Vandel Sillesen
e5d2215267
Support some of pipe operators (#1759) 2025-05-02 05:13:47 +02:00
50 changed files with 7738 additions and 1590 deletions

.github/workflows/license.yml (new file)

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
name: license
# trigger for all PRs and changes to main
on:
push:
branches:
- main
pull_request:
jobs:
rat:
name: Release Audit Tool (RAT)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: 3.8
- name: Audit licenses
run: ./dev/release/run-rat.sh .


@ -19,6 +19,9 @@ name: Rust
on: [push, pull_request]
permissions:
contents: read
jobs:
codestyle:
@ -85,11 +88,8 @@ jobs:
uses: ./.github/actions/setup-builder
with:
rust-version: ${{ matrix.rust }}
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Install Tarpaulin
uses: actions-rs/install@v0.1
with:
crate: cargo-tarpaulin
version: 0.14.2
use-tool-cache: true
run: cargo install cargo-tarpaulin
- name: Test
run: cargo test --all-features


@ -18,7 +18,7 @@
[package]
name = "sqlparser"
description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
version = "0.56.0"
version = "0.57.0"
authors = ["Apache DataFusion <dev@datafusion.apache.org>"]
homepage = "https://github.com/apache/datafusion-sqlparser-rs"
documentation = "https://docs.rs/sqlparser/"


@ -89,10 +89,14 @@ keywords, the following should hold true for all SQL:
```rust
// Parse SQL
let sql = "SELECT 'hello'";
let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
// The original SQL text can be generated from the AST
assert_eq!(ast[0].to_string(), sql);
// The SQL can also be pretty-printed with newlines and indentation
assert_eq!(format!("{:#}", ast[0]), "SELECT\n 'hello'");
```
There are still some cases in this crate where different SQL with seemingly
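
For context, a minimal sketch of driving the API documented above, assuming the 0.57.0 crate released in this range; only the compact round-trip is asserted, and the pretty output is printed rather than compared against an exact string:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // Round-trip guarantee shown in the documentation hunk above.
    let sql = "SELECT 'hello'";
    let ast = Parser::parse_sql(&GenericDialect, sql).expect("valid SQL");
    assert_eq!(ast[0].to_string(), sql);

    // The alternate formatter added in this release (#1847) pretty-prints the
    // statement with newlines and indentation instead of a single line.
    println!("{:#}", ast[0]);
}
```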

changelog/0.57.0.md (new file)

@ -0,0 +1,95 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# sqlparser-rs 0.57.0 Changelog
This release consists of 39 commits from 19 contributors. See credits at the end of this changelog for more information.
**Implemented enhancements:**
- feat: Hive: support `SORT BY` direction [#1873](https://github.com/apache/datafusion-sqlparser-rs/pull/1873) (chenkovsky)
**Other:**
- Support some of pipe operators [#1759](https://github.com/apache/datafusion-sqlparser-rs/pull/1759) (simonvandel)
- Added support for `DROP DOMAIN` [#1828](https://github.com/apache/datafusion-sqlparser-rs/pull/1828) (LucaCappelletti94)
- Improve support for cursors for SQL Server [#1831](https://github.com/apache/datafusion-sqlparser-rs/pull/1831) (aharpervc)
- Add all missing table options to be handled in any order [#1747](https://github.com/apache/datafusion-sqlparser-rs/pull/1747) (benrsatori)
- Add `CREATE TRIGGER` support for SQL Server [#1810](https://github.com/apache/datafusion-sqlparser-rs/pull/1810) (aharpervc)
- Added support for `CREATE DOMAIN` [#1830](https://github.com/apache/datafusion-sqlparser-rs/pull/1830) (LucaCappelletti94)
- Allow stored procedures to be defined without `BEGIN`/`END` [#1834](https://github.com/apache/datafusion-sqlparser-rs/pull/1834) (aharpervc)
- Add support for the MATCH and REGEXP binary operators [#1840](https://github.com/apache/datafusion-sqlparser-rs/pull/1840) (lovasoa)
- Fix: parsing ident starting with underscore in certain dialects [#1835](https://github.com/apache/datafusion-sqlparser-rs/pull/1835) (MohamedAbdeen21)
- implement pretty-printing with `{:#}` [#1847](https://github.com/apache/datafusion-sqlparser-rs/pull/1847) (lovasoa)
- Fix big performance issue in string serialization [#1848](https://github.com/apache/datafusion-sqlparser-rs/pull/1848) (lovasoa)
- Add support for `DENY` statements [#1836](https://github.com/apache/datafusion-sqlparser-rs/pull/1836) (aharpervc)
- Postgresql: Add `REPLICA IDENTITY` operation for `ALTER TABLE` [#1844](https://github.com/apache/datafusion-sqlparser-rs/pull/1844) (MohamedAbdeen21)
- Add support for INCLUDE/EXCLUDE NULLS for UNPIVOT [#1849](https://github.com/apache/datafusion-sqlparser-rs/pull/1849) (Vedin)
- pretty print improvements [#1851](https://github.com/apache/datafusion-sqlparser-rs/pull/1851) (lovasoa)
- fix new rust 1.87 cargo clippy warnings [#1856](https://github.com/apache/datafusion-sqlparser-rs/pull/1856) (lovasoa)
- Update criterion requirement from 0.5 to 0.6 in /sqlparser_bench [#1857](https://github.com/apache/datafusion-sqlparser-rs/pull/1857) (dependabot[bot])
- pretty-print CREATE TABLE statements [#1854](https://github.com/apache/datafusion-sqlparser-rs/pull/1854) (lovasoa)
- pretty-print CREATE VIEW statements [#1855](https://github.com/apache/datafusion-sqlparser-rs/pull/1855) (lovasoa)
- Handle optional datatypes properly in `CREATE FUNCTION` statements [#1826](https://github.com/apache/datafusion-sqlparser-rs/pull/1826) (LucaCappelletti94)
- Mysql: Add `SRID` column option [#1852](https://github.com/apache/datafusion-sqlparser-rs/pull/1852) (MohamedAbdeen21)
- Add support for table valued functions for SQL Server [#1839](https://github.com/apache/datafusion-sqlparser-rs/pull/1839) (aharpervc)
- Keep the COLUMN keyword only if it exists when dropping the column [#1862](https://github.com/apache/datafusion-sqlparser-rs/pull/1862) (git-hulk)
- Add support for parameter default values in SQL Server [#1866](https://github.com/apache/datafusion-sqlparser-rs/pull/1866) (aharpervc)
- Add support for `TABLESAMPLE` pipe operator [#1860](https://github.com/apache/datafusion-sqlparser-rs/pull/1860) (hendrikmakait)
- Adds support for mysql's drop index [#1864](https://github.com/apache/datafusion-sqlparser-rs/pull/1864) (dmzmk)
- Fix: GROUPING SETS accept values without parenthesis [#1867](https://github.com/apache/datafusion-sqlparser-rs/pull/1867) (Vedin)
- Add ICEBERG keyword support to ALTER TABLE statement [#1869](https://github.com/apache/datafusion-sqlparser-rs/pull/1869) (osipovartem)
- MySQL: Support `index_name` in FK constraints [#1871](https://github.com/apache/datafusion-sqlparser-rs/pull/1871) (MohamedAbdeen21)
- Postgres: Apply `ONLY` keyword per table in TRUNCATE stmt [#1872](https://github.com/apache/datafusion-sqlparser-rs/pull/1872) (MohamedAbdeen21)
- Fix `CASE` expression spans [#1874](https://github.com/apache/datafusion-sqlparser-rs/pull/1874) (eliaperantoni)
- MySQL: `[[NOT] ENFORCED]` in CHECK constraint [#1870](https://github.com/apache/datafusion-sqlparser-rs/pull/1870) (MohamedAbdeen21)
- Add support for `CREATE SCHEMA WITH ( <properties> )` [#1877](https://github.com/apache/datafusion-sqlparser-rs/pull/1877) (utay)
- Add support for `ALTER TABLE DROP INDEX` [#1865](https://github.com/apache/datafusion-sqlparser-rs/pull/1865) (vimko)
- chore: Replace archived actions-rs/install action [#1876](https://github.com/apache/datafusion-sqlparser-rs/pull/1876) (assignUser)
- Allow `IF NOT EXISTS` after table name for Snowflake [#1881](https://github.com/apache/datafusion-sqlparser-rs/pull/1881) (bombsimon)
- Support `DISTINCT AS { STRUCT | VALUE }` for BigQuery [#1880](https://github.com/apache/datafusion-sqlparser-rs/pull/1880) (bombsimon)
## Credits
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
```
7 Ophir LOJKINE
6 Andrew Harper
6 Mohamed Abdeen
3 Luca Cappelletti
2 Denys Tsomenko
2 Simon Sawert
1 Andrew Lamb
1 Artem Osipov
1 Chen Chongchen
1 Dmitriy Mazurin
1 Elia Perantoni
1 Hendrik Makait
1 Jacob Wujciak-Jens
1 Simon Vandel Sillesen
1 Yannick Utard
1 benrsatori
1 dependabot[bot]
1 hulk
1 vimko
```
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.


@ -1,7 +1,8 @@
# Files to exclude from the Apache Rat (license) check
.gitignore
.tool-versions
target/*
**.gitignore
rat.txt
dev/release/rat_exclude_files.txt
fuzz/.gitignore
sqlparser_bench/img/flamegraph.svg
**Cargo.lock
filtered_rat.txt


@ -63,7 +63,7 @@ $ cargo run --example cli - [--dialectname]
};
let contents = if filename == "-" {
println!("Parsing from stdin using {:?}", dialect);
println!("Parsing from stdin using {dialect:?}");
let mut buf = Vec::new();
stdin()
.read_to_end(&mut buf)


@ -26,7 +26,7 @@ edition = "2018"
sqlparser = { path = "../" }
[dev-dependencies]
criterion = "0.5"
criterion = "0.6"
[[bench]]
name = "sqlparser_bench"


@ -45,30 +45,29 @@ fn basic_queries(c: &mut Criterion) {
let large_statement = {
let expressions = (0..1000)
.map(|n| format!("FN_{}(COL_{})", n, n))
.map(|n| format!("FN_{n}(COL_{n})"))
.collect::<Vec<_>>()
.join(", ");
let tables = (0..1000)
.map(|n| format!("TABLE_{}", n))
.map(|n| format!("TABLE_{n}"))
.collect::<Vec<_>>()
.join(" JOIN ");
let where_condition = (0..1000)
.map(|n| format!("COL_{} = {}", n, n))
.map(|n| format!("COL_{n} = {n}"))
.collect::<Vec<_>>()
.join(" OR ");
let order_condition = (0..1000)
.map(|n| format!("COL_{} DESC", n))
.map(|n| format!("COL_{n} DESC"))
.collect::<Vec<_>>()
.join(", ");
format!(
"SELECT {} FROM {} WHERE {} ORDER BY {}",
expressions, tables, where_condition, order_condition
"SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
)
};
group.bench_function("parse_large_statement", |b| {
b.iter(|| Parser::parse_sql(&dialect, criterion::black_box(large_statement.as_str())));
b.iter(|| Parser::parse_sql(&dialect, std::hint::black_box(large_statement.as_str())));
});
let large_statement = Parser::parse_sql(&dialect, large_statement.as_str())


@ -48,7 +48,17 @@ pub enum DataType {
/// Table type in [PostgreSQL], e.g. CREATE FUNCTION RETURNS TABLE(...).
///
/// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
Table(Vec<ColumnDef>),
/// [MsSQL]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#c-create-a-multi-statement-table-valued-function
Table(Option<Vec<ColumnDef>>),
/// Table type with a name, e.g. CREATE FUNCTION RETURNS @result TABLE(...).
///
/// [MsSQl]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#table
NamedTable {
/// Table name.
name: ObjectName,
/// Table columns.
columns: Vec<ColumnDef>,
},
/// Fixed-length character type, e.g. CHARACTER(10).
Character(Option<CharacterLength>),
/// Fixed-length char type, e.g. CHAR(10).
@ -436,6 +446,14 @@ pub enum DataType {
///
/// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html
GeometricType(GeometricTypeKind),
/// PostgreSQL text search vectors, see [PostgreSQL].
///
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
TsVector,
/// PostgreSQL text search query, see [PostgreSQL].
///
/// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
TsQuery,
}
impl fmt::Display for DataType {
@ -648,7 +666,7 @@ impl fmt::Display for DataType {
}
DataType::Enum(vals, bits) => {
match bits {
Some(bits) => write!(f, "ENUM{}", bits),
Some(bits) => write!(f, "ENUM{bits}"),
None => write!(f, "ENUM"),
}?;
write!(f, "(")?;
@ -696,16 +714,16 @@ impl fmt::Display for DataType {
}
// ClickHouse
DataType::Nullable(data_type) => {
write!(f, "Nullable({})", data_type)
write!(f, "Nullable({data_type})")
}
DataType::FixedString(character_length) => {
write!(f, "FixedString({})", character_length)
write!(f, "FixedString({character_length})")
}
DataType::LowCardinality(data_type) => {
write!(f, "LowCardinality({})", data_type)
write!(f, "LowCardinality({data_type})")
}
DataType::Map(key_data_type, value_data_type) => {
write!(f, "Map({}, {})", key_data_type, value_data_type)
write!(f, "Map({key_data_type}, {value_data_type})")
}
DataType::Tuple(fields) => {
write!(f, "Tuple({})", display_comma_separated(fields))
@ -716,8 +734,20 @@ impl fmt::Display for DataType {
DataType::Unspecified => Ok(()),
DataType::Trigger => write!(f, "TRIGGER"),
DataType::AnyType => write!(f, "ANY TYPE"),
DataType::Table(fields) => write!(f, "TABLE({})", display_comma_separated(fields)),
DataType::GeometricType(kind) => write!(f, "{}", kind),
DataType::Table(fields) => match fields {
Some(fields) => {
write!(f, "TABLE({})", display_comma_separated(fields))
}
None => {
write!(f, "TABLE")
}
},
DataType::NamedTable { name, columns } => {
write!(f, "{} TABLE ({})", name, display_comma_separated(columns))
}
DataType::GeometricType(kind) => write!(f, "{kind}"),
DataType::TsVector => write!(f, "TSVECTOR"),
DataType::TsQuery => write!(f, "TSQUERY"),
}
}
}
@ -912,7 +942,7 @@ impl fmt::Display for CharacterLength {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CharacterLength::IntegerLength { length, unit } => {
write!(f, "{}", length)?;
write!(f, "{length}")?;
if let Some(unit) = unit {
write!(f, " {unit}")?;
}
@ -967,7 +997,7 @@ impl fmt::Display for BinaryLength {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
BinaryLength::IntegerLength { length } => {
write!(f, "{}", length)?;
write!(f, "{length}")?;
}
BinaryLength::Max => {
write!(f, "MAX")?;

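The `DataType` hunks above add the Postgres text-search types (`TSVECTOR`, `TSQUERY`) and make the MsSQL `TABLE` return type's column list optional. A hedged sketch of exercising the new text-search types, assuming `PostgreSqlDialect` from the same crate; the table and column names are invented for illustration:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // TSVECTOR / TSQUERY become first-class data types in this range (#1889).
    let sql = "CREATE TABLE documents (body TEXT, body_tsv TSVECTOR)";
    let ast = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("valid Postgres DDL");

    // Per the Display arms above, the type serializes back as TSVECTOR.
    assert!(ast[0].to_string().contains("TSVECTOR"));
}
```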

@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation {
in_database,
} => {
if let Some(database_name) = in_database {
write!(f, "IN DATABASE {} ", database_name)?;
write!(f, "IN DATABASE {database_name} ")?;
}
match config_value {
@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation {
in_database,
} => {
if let Some(database_name) = in_database {
write!(f, "IN DATABASE {} ", database_name)?;
write!(f, "IN DATABASE {database_name} ")?;
}
match config_name {
@ -218,15 +218,15 @@ impl fmt::Display for Use {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("USE ")?;
match self {
Use::Catalog(name) => write!(f, "CATALOG {}", name),
Use::Schema(name) => write!(f, "SCHEMA {}", name),
Use::Database(name) => write!(f, "DATABASE {}", name),
Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name),
Use::Role(name) => write!(f, "ROLE {}", name),
Use::Catalog(name) => write!(f, "CATALOG {name}"),
Use::Schema(name) => write!(f, "SCHEMA {name}"),
Use::Database(name) => write!(f, "DATABASE {name}"),
Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
Use::Role(name) => write!(f, "ROLE {name}"),
Use::SecondaryRoles(secondary_roles) => {
write!(f, "SECONDARY ROLES {}", secondary_roles)
write!(f, "SECONDARY ROLES {secondary_roles}")
}
Use::Object(name) => write!(f, "{}", name),
Use::Object(name) => write!(f, "{name}"),
Use::Default => write!(f, "DEFAULT"),
}
}

View file

@ -30,22 +30,48 @@ use sqlparser_derive::{Visit, VisitMut};
use crate::ast::value::escape_single_quote_string;
use crate::ast::{
display_comma_separated, display_separated, CommentDef, CreateFunctionBody,
display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody,
CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull,
FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName,
OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value,
ValueWithSpan,
FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition,
ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag,
Value, ValueWithSpan,
};
use crate::keywords::Keyword;
use crate::tokenizer::Token;
/// ALTER TABLE operation REPLICA IDENTITY values
/// See [Postgres ALTER TABLE docs](https://www.postgresql.org/docs/current/sql-altertable.html)
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum ReplicaIdentity {
None,
Full,
Default,
Index(Ident),
}
impl fmt::Display for ReplicaIdentity {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ReplicaIdentity::None => f.write_str("NONE"),
ReplicaIdentity::Full => f.write_str("FULL"),
ReplicaIdentity::Default => f.write_str("DEFAULT"),
ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"),
}
}
}
/// An `ALTER TABLE` (`Statement::AlterTable`) operation
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum AlterTableOperation {
/// `ADD <table_constraint>`
AddConstraint(TableConstraint),
/// `ADD <table_constraint> [NOT VALID]`
AddConstraint {
constraint: TableConstraint,
not_valid: bool,
},
/// `ADD [COLUMN] [IF NOT EXISTS] <column_def>`
AddColumn {
/// `[COLUMN]`.
@ -114,9 +140,10 @@ pub enum AlterTableOperation {
name: Ident,
drop_behavior: Option<DropBehavior>,
},
/// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ CASCADE ]`
/// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ , <column_name>, ... ] [ CASCADE ]`
DropColumn {
column_name: Ident,
has_column_keyword: bool,
column_names: Vec<Ident>,
if_exists: bool,
drop_behavior: Option<DropBehavior>,
},
@ -163,6 +190,12 @@ pub enum AlterTableOperation {
DropForeignKey {
name: Ident,
},
/// `DROP INDEX <index_name>`
///
/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html
DropIndex {
name: Ident,
},
/// `ENABLE ALWAYS RULE rewrite_rule_name`
///
/// Note: this is a PostgreSQL-specific operation.
@ -208,6 +241,13 @@ pub enum AlterTableOperation {
old_partitions: Vec<Expr>,
new_partitions: Vec<Expr>,
},
/// REPLICA IDENTITY { DEFAULT | USING INDEX index_name | FULL | NOTHING }
///
/// Note: this is a PostgreSQL-specific operation.
/// Please refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/sql-altertable.html)
ReplicaIdentity {
identity: ReplicaIdentity,
},
/// Add Partitions
AddPartitions {
if_not_exists: bool,
@ -307,6 +347,10 @@ pub enum AlterTableOperation {
equals: bool,
value: ValueWithSpan,
},
/// `VALIDATE CONSTRAINT <name>`
ValidateConstraint {
name: Ident,
},
}
/// An `ALTER Policy` (`Statement::AlterPolicy`) operation
@ -413,7 +457,7 @@ pub enum Owner {
impl fmt::Display for Owner {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Owner::Ident(ident) => write!(f, "{}", ident),
Owner::Ident(ident) => write!(f, "{ident}"),
Owner::CurrentRole => write!(f, "CURRENT_ROLE"),
Owner::CurrentUser => write!(f, "CURRENT_USER"),
Owner::SessionUser => write!(f, "SESSION_USER"),
@ -457,7 +501,16 @@ impl fmt::Display for AlterTableOperation {
display_separated(new_partitions, " "),
ine = if *if_not_exists { " IF NOT EXISTS" } else { "" }
),
AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"),
AlterTableOperation::AddConstraint {
not_valid,
constraint,
} => {
write!(f, "ADD {constraint}")?;
if *not_valid {
write!(f, " NOT VALID")?;
}
Ok(())
}
AlterTableOperation::AddColumn {
column_keyword,
if_not_exists,
@ -488,7 +541,7 @@ impl fmt::Display for AlterTableOperation {
if *if_not_exists {
write!(f, " IF NOT EXISTS")?;
}
write!(f, " {} ({})", name, query)
write!(f, " {name} ({query})")
}
AlterTableOperation::Algorithm { equals, algorithm } => {
write!(
@ -503,7 +556,7 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)
write!(f, " {name}")
}
AlterTableOperation::MaterializeProjection {
if_exists,
@ -514,9 +567,9 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)?;
write!(f, " {name}")?;
if let Some(partition) = partition {
write!(f, " IN PARTITION {}", partition)?;
write!(f, " IN PARTITION {partition}")?;
}
Ok(())
}
@ -529,9 +582,9 @@ impl fmt::Display for AlterTableOperation {
if *if_exists {
write!(f, " IF EXISTS")?;
}
write!(f, " {}", name)?;
write!(f, " {name}")?;
if let Some(partition) = partition {
write!(f, " IN PARTITION {}", partition)?;
write!(f, " IN PARTITION {partition}")?;
}
Ok(())
}
@ -575,15 +628,18 @@ impl fmt::Display for AlterTableOperation {
}
AlterTableOperation::DropPrimaryKey => write!(f, "DROP PRIMARY KEY"),
AlterTableOperation::DropForeignKey { name } => write!(f, "DROP FOREIGN KEY {name}"),
AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"),
AlterTableOperation::DropColumn {
column_name,
has_column_keyword,
column_names: column_name,
if_exists,
drop_behavior,
} => write!(
f,
"DROP COLUMN {}{}{}",
"DROP {}{}{}{}",
if *has_column_keyword { "COLUMN " } else { "" },
if *if_exists { "IF EXISTS " } else { "" },
column_name,
display_comma_separated(column_name),
match drop_behavior {
None => "",
Some(DropBehavior::Restrict) => " RESTRICT",
@ -729,6 +785,12 @@ impl fmt::Display for AlterTableOperation {
AlterTableOperation::Lock { equals, lock } => {
write!(f, "LOCK {}{}", if *equals { "= " } else { "" }, lock)
}
AlterTableOperation::ReplicaIdentity { identity } => {
write!(f, "REPLICA IDENTITY {identity}")
}
AlterTableOperation::ValidateConstraint { name } => {
write!(f, "VALIDATE CONSTRAINT {name}")
}
}
}
}
@ -850,7 +912,10 @@ pub enum AlterColumnOperation {
data_type: DataType,
/// PostgreSQL specific
using: Option<Expr>,
/// Set to true if the statement includes the `SET DATA TYPE` keywords
had_set: bool,
},
/// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]`
///
/// Note: this is a PostgreSQL-specific operation.
@ -871,12 +936,19 @@ impl fmt::Display for AlterColumnOperation {
AlterColumnOperation::DropDefault => {
write!(f, "DROP DEFAULT")
}
AlterColumnOperation::SetDataType { data_type, using } => {
if let Some(expr) = using {
write!(f, "SET DATA TYPE {data_type} USING {expr}")
} else {
write!(f, "SET DATA TYPE {data_type}")
AlterColumnOperation::SetDataType {
data_type,
using,
had_set,
} => {
if *had_set {
write!(f, "SET DATA ")?;
}
write!(f, "TYPE {data_type}")?;
if let Some(expr) = using {
write!(f, " USING {expr}")?;
}
Ok(())
}
AlterColumnOperation::AddGenerated {
generated_as,
@ -936,7 +1008,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Identifiers of the columns that are unique.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
index_options: Vec<IndexOption>,
characteristics: Option<ConstraintCharacteristics>,
/// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]`
@ -972,7 +1044,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Identifiers of the columns that form the primary key.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
index_options: Vec<IndexOption>,
characteristics: Option<ConstraintCharacteristics>,
},
@ -983,6 +1055,9 @@ pub enum TableConstraint {
/// }`).
ForeignKey {
name: Option<Ident>,
/// MySQL-specific field
/// <https://dev.mysql.com/doc/refman/8.4/en/create-table-foreign-keys.html>
index_name: Option<Ident>,
columns: Vec<Ident>,
foreign_table: ObjectName,
referred_columns: Vec<Ident>,
@ -990,10 +1065,13 @@ pub enum TableConstraint {
on_update: Option<ReferentialAction>,
characteristics: Option<ConstraintCharacteristics>,
},
/// `[ CONSTRAINT <name> ] CHECK (<expr>)`
/// `[ CONSTRAINT <name> ] CHECK (<expr>) [[NOT] ENFORCED]`
Check {
name: Option<Ident>,
expr: Box<Expr>,
/// MySQL-specific syntax
/// <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
enforced: Option<bool>,
},
/// MySQLs [index definition][1] for index creation. Not present on ANSI so, for now, the usage
/// is restricted to MySQL, as no other dialects that support this syntax were found.
@ -1011,7 +1089,7 @@ pub enum TableConstraint {
/// [1]: IndexType
index_type: Option<IndexType>,
/// Referred column identifier list.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
},
/// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same,
/// and MySQL displays both the same way, it is part of this definition as well.
@ -1034,7 +1112,7 @@ pub enum TableConstraint {
/// Optional index name.
opt_index_name: Option<Ident>,
/// Referred column identifier list.
columns: Vec<Ident>,
columns: Vec<IndexColumn>,
},
}
@ -1093,6 +1171,7 @@ impl fmt::Display for TableConstraint {
}
TableConstraint::ForeignKey {
name,
index_name,
columns,
foreign_table,
referred_columns,
@ -1102,8 +1181,9 @@ impl fmt::Display for TableConstraint {
} => {
write!(
f,
"{}FOREIGN KEY ({}) REFERENCES {}",
"{}FOREIGN KEY{} ({}) REFERENCES {}",
display_constraint_name(name),
display_option_spaced(index_name),
display_comma_separated(columns),
foreign_table,
)?;
@ -1117,12 +1197,21 @@ impl fmt::Display for TableConstraint {
write!(f, " ON UPDATE {action}")?;
}
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
TableConstraint::Check { name, expr } => {
write!(f, "{}CHECK ({})", display_constraint_name(name), expr)
TableConstraint::Check {
name,
expr,
enforced,
} => {
write!(f, "{}CHECK ({})", display_constraint_name(name), expr)?;
if let Some(b) = enforced {
write!(f, " {}", if *b { "ENFORCED" } else { "NOT ENFORCED" })
} else {
Ok(())
}
}
TableConstraint::Index {
display_as_key,
@ -1248,7 +1337,7 @@ impl fmt::Display for IndexType {
Self::SPGiST => write!(f, "SPGIST"),
Self::BRIN => write!(f, "BRIN"),
Self::Bloom => write!(f, "BLOOM"),
Self::Custom(name) => write!(f, "{}", name),
Self::Custom(name) => write!(f, "{name}"),
}
}
}
@ -1307,11 +1396,16 @@ impl fmt::Display for NullsDistinctOption {
pub struct ProcedureParam {
pub name: Ident,
pub data_type: DataType,
pub mode: Option<ArgMode>,
}
impl fmt::Display for ProcedureParam {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", self.name, self.data_type)
if let Some(mode) = &self.mode {
write!(f, "{mode} {} {}", self.name, self.data_type)
} else {
write!(f, "{} {}", self.name, self.data_type)
}
}
}
@ -1361,17 +1455,41 @@ impl fmt::Display for ColumnDef {
pub struct ViewColumnDef {
pub name: Ident,
pub data_type: Option<DataType>,
pub options: Option<Vec<ColumnOption>>,
pub options: Option<ColumnOptions>,
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum ColumnOptions {
CommaSeparated(Vec<ColumnOption>),
SpaceSeparated(Vec<ColumnOption>),
}
impl ColumnOptions {
pub fn as_slice(&self) -> &[ColumnOption] {
match self {
ColumnOptions::CommaSeparated(options) => options.as_slice(),
ColumnOptions::SpaceSeparated(options) => options.as_slice(),
}
}
}
impl fmt::Display for ViewColumnDef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.name)?;
if let Some(data_type) = self.data_type.as_ref() {
write!(f, " {}", data_type)?;
write!(f, " {data_type}")?;
}
if let Some(options) = self.options.as_ref() {
write!(f, " {}", display_comma_separated(options.as_slice()))?;
match options {
ColumnOptions::CommaSeparated(column_options) => {
write!(f, " {}", display_comma_separated(column_options.as_slice()))?;
}
ColumnOptions::SpaceSeparated(column_options) => {
write!(f, " {}", display_separated(column_options.as_slice(), " "))?
}
}
}
Ok(())
}
@ -1591,7 +1709,7 @@ pub struct ColumnPolicyProperty {
/// ```
/// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table
pub with: bool,
pub policy_name: Ident,
pub policy_name: ObjectName,
pub using_columns: Option<Vec<Ident>>,
}
@ -1725,6 +1843,13 @@ pub enum ColumnOption {
/// ```
/// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table
Tags(TagsColumnOption),
/// MySQL specific: Spatial reference identifier
/// Syntax:
/// ```sql
/// CREATE TABLE geom (g GEOMETRY NOT NULL SRID 4326);
/// ```
/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/creating-spatial-indexes.html
Srid(Box<Expr>),
}
impl fmt::Display for ColumnOption {
@ -1749,7 +1874,7 @@ impl fmt::Display for ColumnOption {
} => {
write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?;
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
@ -1771,7 +1896,7 @@ impl fmt::Display for ColumnOption {
write!(f, " ON UPDATE {action}")?;
}
if let Some(characteristics) = characteristics {
write!(f, " {}", characteristics)?;
write!(f, " {characteristics}")?;
}
Ok(())
}
@ -1831,7 +1956,7 @@ impl fmt::Display for ColumnOption {
write!(f, "{parameters}")
}
OnConflict(keyword) => {
write!(f, "ON CONFLICT {:?}", keyword)?;
write!(f, "ON CONFLICT {keyword:?}")?;
Ok(())
}
Policy(parameters) => {
@ -1840,6 +1965,9 @@ impl fmt::Display for ColumnOption {
Tags(tags) => {
write!(f, "{tags}")
}
Srid(srid) => {
write!(f, "SRID {srid}")
}
}
}
}
@ -2153,6 +2281,55 @@ impl fmt::Display for ClusteredBy {
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
/// ```sql
/// CREATE DOMAIN name [ AS ] data_type
/// [ COLLATE collation ]
/// [ DEFAULT expression ]
/// [ domain_constraint [ ... ] ]
///
/// where domain_constraint is:
///
/// [ CONSTRAINT constraint_name ]
/// { NOT NULL | NULL | CHECK (expression) }
/// ```
/// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createdomain.html)
pub struct CreateDomain {
/// The name of the domain to be created.
pub name: ObjectName,
/// The data type of the domain.
pub data_type: DataType,
/// The collation of the domain.
pub collation: Option<Ident>,
/// The default value of the domain.
pub default: Option<Expr>,
/// The constraints of the domain.
pub constraints: Vec<TableConstraint>,
}
impl fmt::Display for CreateDomain {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"CREATE DOMAIN {name} AS {data_type}",
name = self.name,
data_type = self.data_type
)?;
if let Some(collation) = &self.collation {
write!(f, " COLLATE {collation}")?;
}
if let Some(default) = &self.default {
write!(f, " DEFAULT {default}")?;
}
if !self.constraints.is_empty() {
write!(f, " {}", display_separated(&self.constraints, " "))?;
}
Ok(())
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
@ -2264,6 +2441,12 @@ impl fmt::Display for CreateFunction {
if let Some(CreateFunctionBody::Return(function_body)) = &self.function_body {
write!(f, " RETURN {function_body}")?;
}
if let Some(CreateFunctionBody::AsReturnExpr(function_body)) = &self.function_body {
write!(f, " AS RETURN {function_body}")?;
}
if let Some(CreateFunctionBody::AsReturnSelect(function_body)) = &self.function_body {
write!(f, " AS RETURN {function_body}")?;
}
if let Some(using) = &self.using {
write!(f, " {using}")?;
}

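The `AlterTableOperation`/`TableConstraint` hunks above surface several of the listed PRs: `REPLICA IDENTITY` (#1844), `VALIDATE CONSTRAINT` (#1908), multi-column `DROP COLUMN` (#1918), and the new `CreateDomain` node (#1830). A minimal sketch of parsing a few of these forms, assuming `PostgreSqlDialect`; the statements are illustrative and only successful parsing is checked:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = PostgreSqlDialect {};

    for sql in [
        "ALTER TABLE t REPLICA IDENTITY FULL",
        "ALTER TABLE t VALIDATE CONSTRAINT my_check",
        "CREATE DOMAIN positive_int AS INTEGER CHECK (VALUE > 0)",
    ] {
        let ast = Parser::parse_sql(&dialect, sql).expect("statement should parse");
        // Each parsed statement serializes back to SQL through its Display impl.
        println!("{}", ast[0]);
    }
}
```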

@ -29,15 +29,17 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut};
use crate::display_utils::{indented_list, DisplayCommaSeparated, Indent, NewLine, SpaceOrNewline};
pub use super::ddl::{ColumnDef, TableConstraint};
use super::{
display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy,
CommentDef, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, HiveIOFormat,
HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName, OnCommit,
OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, Setting,
SqlOption, SqliteOnConflict, StorageSerializationPolicy, TableEngine, TableObject,
TableWithJoins, Tag, WrappedCollection,
CommentDef, CreateTableOptions, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat,
HiveIOFormat, HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName,
OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem,
Setting, SqliteOnConflict, StorageSerializationPolicy, TableObject, TableWithJoins, Tag,
WrappedCollection,
};
/// Index column type.
@ -53,7 +55,7 @@ impl Display for IndexColumn {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.column)?;
if let Some(operator_class) = &self.operator_class {
write!(f, " {}", operator_class)?;
write!(f, " {operator_class}")?;
}
Ok(())
}
@ -146,19 +148,17 @@ pub struct CreateTable {
pub constraints: Vec<TableConstraint>,
pub hive_distribution: HiveDistributionStyle,
pub hive_formats: Option<HiveFormat>,
pub table_properties: Vec<SqlOption>,
pub with_options: Vec<SqlOption>,
pub table_options: CreateTableOptions,
pub file_format: Option<FileFormat>,
pub location: Option<String>,
pub query: Option<Box<Query>>,
pub without_rowid: bool,
pub like: Option<ObjectName>,
pub clone: Option<ObjectName>,
pub engine: Option<TableEngine>,
// For Hive dialect, the table comment is after the column definitions without `=`,
// so the `comment` field is optional and different than the comment field in the general options list.
// [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable)
pub comment: Option<CommentDef>,
pub auto_increment_offset: Option<u32>,
pub default_charset: Option<String>,
pub collation: Option<String>,
pub on_commit: Option<OnCommit>,
/// ClickHouse "ON CLUSTER" clause:
/// <https://clickhouse.com/docs/en/sql-reference/distributed-ddl/>
@ -175,13 +175,12 @@ pub struct CreateTable {
pub partition_by: Option<Box<Expr>>,
/// BigQuery: Table clustering column list.
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list>
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
/// Snowflake: Table clustering list which contains base column, expressions on base columns.
/// <https://docs.snowflake.com/en/user-guide/tables-clustering-keys#defining-a-clustering-key-for-a-table>
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
/// Hive: Table clustering column list.
/// <https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable>
pub clustered_by: Option<ClusteredBy>,
/// BigQuery: Table options list.
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list>
pub options: Option<Vec<SqlOption>>,
/// Postgres `INHERITs` clause, which contains the list of tables from which
/// the new table inherits.
/// <https://www.postgresql.org/docs/current/ddl-inherit.html>
@ -267,22 +266,27 @@ impl Display for CreateTable {
name = self.name,
)?;
if let Some(on_cluster) = &self.on_cluster {
write!(f, " ON CLUSTER {}", on_cluster)?;
write!(f, " ON CLUSTER {on_cluster}")?;
}
if !self.columns.is_empty() || !self.constraints.is_empty() {
write!(f, " ({}", display_comma_separated(&self.columns))?;
f.write_str(" (")?;
NewLine.fmt(f)?;
Indent(DisplayCommaSeparated(&self.columns)).fmt(f)?;
if !self.columns.is_empty() && !self.constraints.is_empty() {
write!(f, ", ")?;
f.write_str(",")?;
SpaceOrNewline.fmt(f)?;
}
write!(f, "{})", display_comma_separated(&self.constraints))?;
Indent(DisplayCommaSeparated(&self.constraints)).fmt(f)?;
NewLine.fmt(f)?;
f.write_str(")")?;
} else if self.query.is_none() && self.like.is_none() && self.clone.is_none() {
// PostgreSQL allows `CREATE TABLE t ();`, but requires empty parens
write!(f, " ()")?;
f.write_str(" ()")?;
}
// Hive table comment should be after column definitions, please refer to:
// [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable)
if let Some(CommentDef::AfterColumnDefsWithoutEq(comment)) = &self.comment {
if let Some(comment) = &self.comment {
write!(f, " COMMENT '{comment}'")?;
}
@ -375,40 +379,19 @@ impl Display for CreateTable {
}
write!(f, " LOCATION '{}'", self.location.as_ref().unwrap())?;
}
if !self.table_properties.is_empty() {
write!(
f,
" TBLPROPERTIES ({})",
display_comma_separated(&self.table_properties)
)?;
}
if !self.with_options.is_empty() {
write!(f, " WITH ({})", display_comma_separated(&self.with_options))?;
}
if let Some(engine) = &self.engine {
write!(f, " ENGINE={engine}")?;
}
if let Some(comment_def) = &self.comment {
match comment_def {
CommentDef::WithEq(comment) => {
write!(f, " COMMENT = '{comment}'")?;
}
CommentDef::WithoutEq(comment) => {
write!(f, " COMMENT '{comment}'")?;
}
// For CommentDef::AfterColumnDefsWithoutEq will be displayed after column definition
CommentDef::AfterColumnDefsWithoutEq(_) => (),
}
match &self.table_options {
options @ CreateTableOptions::With(_)
| options @ CreateTableOptions::Plain(_)
| options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
_ => (),
}
if let Some(auto_increment_offset) = self.auto_increment_offset {
write!(f, " AUTO_INCREMENT {auto_increment_offset}")?;
}
if let Some(primary_key) = &self.primary_key {
write!(f, " PRIMARY KEY {}", primary_key)?;
write!(f, " PRIMARY KEY {primary_key}")?;
}
if let Some(order_by) = &self.order_by {
write!(f, " ORDER BY {}", order_by)?;
write!(f, " ORDER BY {order_by}")?;
}
if let Some(inherits) = &self.inherits {
write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
@ -419,15 +402,9 @@ impl Display for CreateTable {
if let Some(cluster_by) = self.cluster_by.as_ref() {
write!(f, " CLUSTER BY {cluster_by}")?;
}
if let Some(options) = self.options.as_ref() {
write!(
f,
" OPTIONS({})",
display_comma_separated(options.as_slice())
)?;
if let options @ CreateTableOptions::Options(_) = &self.table_options {
write!(f, " {options}")?;
}
if let Some(external_volume) = self.external_volume.as_ref() {
write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;
}
@ -503,13 +480,6 @@ impl Display for CreateTable {
write!(f, " WITH TAG ({})", display_comma_separated(tag.as_slice()))?;
}
if let Some(default_charset) = &self.default_charset {
write!(f, " DEFAULT CHARSET={default_charset}")?;
}
if let Some(collation) = &self.collation {
write!(f, " COLLATE={collation}")?;
}
if self.on_commit.is_some() {
let on_commit = match self.on_commit {
Some(OnCommit::DeleteRows) => "ON COMMIT DELETE ROWS",
@ -618,28 +588,32 @@ impl Display for Insert {
)?;
}
if !self.columns.is_empty() {
write!(f, "({}) ", display_comma_separated(&self.columns))?;
write!(f, "({})", display_comma_separated(&self.columns))?;
SpaceOrNewline.fmt(f)?;
}
if let Some(ref parts) = self.partitioned {
if !parts.is_empty() {
write!(f, "PARTITION ({}) ", display_comma_separated(parts))?;
write!(f, "PARTITION ({})", display_comma_separated(parts))?;
SpaceOrNewline.fmt(f)?;
}
}
if !self.after_columns.is_empty() {
write!(f, "({}) ", display_comma_separated(&self.after_columns))?;
write!(f, "({})", display_comma_separated(&self.after_columns))?;
SpaceOrNewline.fmt(f)?;
}
if let Some(settings) = &self.settings {
write!(f, "SETTINGS {} ", display_comma_separated(settings))?;
write!(f, "SETTINGS {}", display_comma_separated(settings))?;
SpaceOrNewline.fmt(f)?;
}
if let Some(source) = &self.source {
write!(f, "{source}")?;
source.fmt(f)?;
} else if !self.assignments.is_empty() {
write!(f, "SET ")?;
write!(f, "{}", display_comma_separated(&self.assignments))?;
write!(f, "SET")?;
indented_list(f, &self.assignments)?;
} else if let Some(format_clause) = &self.format_clause {
write!(f, "{format_clause}")?;
format_clause.fmt(f)?;
} else if self.columns.is_empty() {
write!(f, "DEFAULT VALUES")?;
}
@ -659,7 +633,9 @@ impl Display for Insert {
}
if let Some(returning) = &self.returning {
write!(f, " RETURNING {}", display_comma_separated(returning))?;
SpaceOrNewline.fmt(f)?;
f.write_str("RETURNING")?;
indented_list(f, returning)?;
}
Ok(())
}
@ -688,32 +664,45 @@ pub struct Delete {
impl Display for Delete {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "DELETE ")?;
f.write_str("DELETE")?;
if !self.tables.is_empty() {
write!(f, "{} ", display_comma_separated(&self.tables))?;
indented_list(f, &self.tables)?;
}
match &self.from {
FromTable::WithFromKeyword(from) => {
write!(f, "FROM {}", display_comma_separated(from))?;
f.write_str(" FROM")?;
indented_list(f, from)?;
}
FromTable::WithoutKeyword(from) => {
write!(f, "{}", display_comma_separated(from))?;
indented_list(f, from)?;
}
}
if let Some(using) = &self.using {
write!(f, " USING {}", display_comma_separated(using))?;
SpaceOrNewline.fmt(f)?;
f.write_str("USING")?;
indented_list(f, using)?;
}
if let Some(selection) = &self.selection {
write!(f, " WHERE {selection}")?;
SpaceOrNewline.fmt(f)?;
f.write_str("WHERE")?;
SpaceOrNewline.fmt(f)?;
Indent(selection).fmt(f)?;
}
if let Some(returning) = &self.returning {
write!(f, " RETURNING {}", display_comma_separated(returning))?;
SpaceOrNewline.fmt(f)?;
f.write_str("RETURNING")?;
indented_list(f, returning)?;
}
if !self.order_by.is_empty() {
write!(f, " ORDER BY {}", display_comma_separated(&self.order_by))?;
SpaceOrNewline.fmt(f)?;
f.write_str("ORDER BY")?;
indented_list(f, &self.order_by)?;
}
if let Some(limit) = &self.limit {
write!(f, " LIMIT {limit}")?;
SpaceOrNewline.fmt(f)?;
f.write_str("LIMIT")?;
SpaceOrNewline.fmt(f)?;
Indent(limit).fmt(f)?;
}
Ok(())
}
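The rewritten `Insert` and `Delete` display impls above route every clause separator through `SpaceOrNewline`, `Indent`, and `indented_list`, so one code path produces both the compact and the pretty form. A minimal sketch of the observable behavior, assuming the two-space indent used by the new display utilities; the pretty layout shown in the comment is illustrative, not copied from a test:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "DELETE FROM t WHERE a = 1";
    let stmt = &Parser::parse_sql(&GenericDialect {}, sql).unwrap()[0];
    // The compact `{}` form still round-trips to the original text.
    assert_eq!(stmt.to_string(), sql);
    // The alternate `{:#}` form breaks clauses onto indented lines, roughly:
    // DELETE FROM
    //   t
    // WHERE
    //   a = 1
    println!("{stmt:#}");
}
```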


@ -67,7 +67,7 @@ impl fmt::Display for KeyValueOptions {
} else {
f.write_str(" ")?;
}
write!(f, "{}", option)?;
write!(f, "{option}")?;
}
}
Ok(())


@ -26,10 +26,12 @@ use sqlparser_derive::{Visit, VisitMut};
use super::super::dml::CreateTable;
use crate::ast::{
ClusteredBy, ColumnDef, CommentDef, Expr, FileFormat, HiveDistributionStyle, HiveFormat, Ident,
ObjectName, OnCommit, OneOrManyWithParens, Query, RowAccessPolicy, SqlOption, Statement,
StorageSerializationPolicy, TableConstraint, TableEngine, Tag, WrappedCollection,
ClusteredBy, ColumnDef, CommentDef, CreateTableOptions, Expr, FileFormat,
HiveDistributionStyle, HiveFormat, Ident, ObjectName, OnCommit, OneOrManyWithParens, Query,
RowAccessPolicy, Statement, StorageSerializationPolicy, TableConstraint, Tag,
WrappedCollection,
};
use crate::parser::ParserError;
/// Builder for create table statement variant ([1]).
@ -76,27 +78,20 @@ pub struct CreateTableBuilder {
pub constraints: Vec<TableConstraint>,
pub hive_distribution: HiveDistributionStyle,
pub hive_formats: Option<HiveFormat>,
pub table_properties: Vec<SqlOption>,
pub with_options: Vec<SqlOption>,
pub file_format: Option<FileFormat>,
pub location: Option<String>,
pub query: Option<Box<Query>>,
pub without_rowid: bool,
pub like: Option<ObjectName>,
pub clone: Option<ObjectName>,
pub engine: Option<TableEngine>,
pub comment: Option<CommentDef>,
pub auto_increment_offset: Option<u32>,
pub default_charset: Option<String>,
pub collation: Option<String>,
pub on_commit: Option<OnCommit>,
pub on_cluster: Option<Ident>,
pub primary_key: Option<Box<Expr>>,
pub order_by: Option<OneOrManyWithParens<Expr>>,
pub partition_by: Option<Box<Expr>>,
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
pub clustered_by: Option<ClusteredBy>,
pub options: Option<Vec<SqlOption>>,
pub inherits: Option<Vec<ObjectName>>,
pub strict: bool,
pub copy_grants: bool,
@ -113,6 +108,7 @@ pub struct CreateTableBuilder {
pub catalog: Option<String>,
pub catalog_sync: Option<String>,
pub storage_serialization_policy: Option<StorageSerializationPolicy>,
pub table_options: CreateTableOptions,
}
impl CreateTableBuilder {
@ -131,19 +127,13 @@ impl CreateTableBuilder {
constraints: vec![],
hive_distribution: HiveDistributionStyle::NONE,
hive_formats: None,
table_properties: vec![],
with_options: vec![],
file_format: None,
location: None,
query: None,
without_rowid: false,
like: None,
clone: None,
engine: None,
comment: None,
auto_increment_offset: None,
default_charset: None,
collation: None,
on_commit: None,
on_cluster: None,
primary_key: None,
@ -151,7 +141,6 @@ impl CreateTableBuilder {
partition_by: None,
cluster_by: None,
clustered_by: None,
options: None,
inherits: None,
strict: false,
copy_grants: false,
@ -168,6 +157,7 @@ impl CreateTableBuilder {
catalog: None,
catalog_sync: None,
storage_serialization_policy: None,
table_options: CreateTableOptions::None,
}
}
pub fn or_replace(mut self, or_replace: bool) -> Self {
@ -230,15 +220,6 @@ impl CreateTableBuilder {
self
}
pub fn table_properties(mut self, table_properties: Vec<SqlOption>) -> Self {
self.table_properties = table_properties;
self
}
pub fn with_options(mut self, with_options: Vec<SqlOption>) -> Self {
self.with_options = with_options;
self
}
pub fn file_format(mut self, file_format: Option<FileFormat>) -> Self {
self.file_format = file_format;
self
@ -268,31 +249,11 @@ impl CreateTableBuilder {
self
}
pub fn engine(mut self, engine: Option<TableEngine>) -> Self {
self.engine = engine;
self
}
pub fn comment(mut self, comment: Option<CommentDef>) -> Self {
pub fn comment_after_column_def(mut self, comment: Option<CommentDef>) -> Self {
self.comment = comment;
self
}
pub fn auto_increment_offset(mut self, offset: Option<u32>) -> Self {
self.auto_increment_offset = offset;
self
}
pub fn default_charset(mut self, default_charset: Option<String>) -> Self {
self.default_charset = default_charset;
self
}
pub fn collation(mut self, collation: Option<String>) -> Self {
self.collation = collation;
self
}
pub fn on_commit(mut self, on_commit: Option<OnCommit>) -> Self {
self.on_commit = on_commit;
self
@ -318,7 +279,7 @@ impl CreateTableBuilder {
self
}
pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Ident>>>) -> Self {
pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Expr>>>) -> Self {
self.cluster_by = cluster_by;
self
}
@ -328,11 +289,6 @@ impl CreateTableBuilder {
self
}
pub fn options(mut self, options: Option<Vec<SqlOption>>) -> Self {
self.options = options;
self
}
pub fn inherits(mut self, inherits: Option<Vec<ObjectName>>) -> Self {
self.inherits = inherits;
self
@ -422,6 +378,11 @@ impl CreateTableBuilder {
self
}
pub fn table_options(mut self, table_options: CreateTableOptions) -> Self {
self.table_options = table_options;
self
}
pub fn build(self) -> Statement {
Statement::CreateTable(CreateTable {
or_replace: self.or_replace,
@ -437,19 +398,13 @@ impl CreateTableBuilder {
constraints: self.constraints,
hive_distribution: self.hive_distribution,
hive_formats: self.hive_formats,
table_properties: self.table_properties,
with_options: self.with_options,
file_format: self.file_format,
location: self.location,
query: self.query,
without_rowid: self.without_rowid,
like: self.like,
clone: self.clone,
engine: self.engine,
comment: self.comment,
auto_increment_offset: self.auto_increment_offset,
default_charset: self.default_charset,
collation: self.collation,
on_commit: self.on_commit,
on_cluster: self.on_cluster,
primary_key: self.primary_key,
@ -457,7 +412,6 @@ impl CreateTableBuilder {
partition_by: self.partition_by,
cluster_by: self.cluster_by,
clustered_by: self.clustered_by,
options: self.options,
inherits: self.inherits,
strict: self.strict,
copy_grants: self.copy_grants,
@ -474,6 +428,7 @@ impl CreateTableBuilder {
catalog: self.catalog,
catalog_sync: self.catalog_sync,
storage_serialization_policy: self.storage_serialization_policy,
table_options: self.table_options,
})
}
}
@ -499,19 +454,13 @@ impl TryFrom<Statement> for CreateTableBuilder {
constraints,
hive_distribution,
hive_formats,
table_properties,
with_options,
file_format,
location,
query,
without_rowid,
like,
clone,
engine,
comment,
auto_increment_offset,
default_charset,
collation,
on_commit,
on_cluster,
primary_key,
@ -519,7 +468,6 @@ impl TryFrom<Statement> for CreateTableBuilder {
partition_by,
cluster_by,
clustered_by,
options,
inherits,
strict,
copy_grants,
@ -536,6 +484,7 @@ impl TryFrom<Statement> for CreateTableBuilder {
catalog,
catalog_sync,
storage_serialization_policy,
table_options,
}) => Ok(Self {
or_replace,
temporary,
@ -548,19 +497,13 @@ impl TryFrom<Statement> for CreateTableBuilder {
constraints,
hive_distribution,
hive_formats,
table_properties,
with_options,
file_format,
location,
query,
without_rowid,
like,
clone,
engine,
comment,
auto_increment_offset,
default_charset,
collation,
on_commit,
on_cluster,
primary_key,
@ -568,7 +511,6 @@ impl TryFrom<Statement> for CreateTableBuilder {
partition_by,
cluster_by,
clustered_by,
options,
inherits,
strict,
iceberg,
@ -587,6 +529,7 @@ impl TryFrom<Statement> for CreateTableBuilder {
catalog,
catalog_sync,
storage_serialization_policy,
table_options,
}),
_ => Err(ParserError::ParserError(format!(
"Expected create table statement, but received: {stmt}"
@ -599,9 +542,9 @@ impl TryFrom<Statement> for CreateTableBuilder {
#[derive(Default)]
pub(crate) struct CreateTableConfiguration {
pub partition_by: Option<Box<Expr>>,
pub cluster_by: Option<WrappedCollection<Vec<Ident>>>,
pub options: Option<Vec<SqlOption>>,
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
pub inherits: Option<Vec<ObjectName>>,
pub table_options: CreateTableOptions,
}
#[cfg(test)]


@ -21,8 +21,6 @@
#[cfg(not(feature = "std"))]
use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::fmt;
#[cfg(feature = "serde")]

File diff suppressed because it is too large


@ -139,6 +139,11 @@ pub enum BinaryOperator {
DuckIntegerDivide,
/// MySQL [`DIV`](https://dev.mysql.com/doc/refman/8.0/en/arithmetic-functions.html) integer division
MyIntegerDivide,
/// MATCH operator, e.g. `a MATCH b` (SQLite-specific)
/// See <https://www.sqlite.org/lang_expr.html#the_like_glob_regexp_match_and_extract_operators>
Match,
/// REGEXP operator, e.g. `a REGEXP b` (SQLite-specific)
Regexp,
/// Support for custom operators (such as Postgres custom operators)
Custom(String),
/// Bitwise XOR, e.g. `a # b` (PostgreSQL-specific)
@ -350,6 +355,8 @@ impl fmt::Display for BinaryOperator {
BinaryOperator::BitwiseXor => f.write_str("^"),
BinaryOperator::DuckIntegerDivide => f.write_str("//"),
BinaryOperator::MyIntegerDivide => f.write_str("DIV"),
BinaryOperator::Match => f.write_str("MATCH"),
BinaryOperator::Regexp => f.write_str("REGEXP"),
BinaryOperator::Custom(s) => f.write_str(s),
BinaryOperator::PGBitwiseXor => f.write_str("#"),
BinaryOperator::PGBitwiseShiftLeft => f.write_str("<<"),

File diff suppressed because it is too large


@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
use crate::ast::query::SelectItemQualifiedWildcardKind;
use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions};
use core::iter;
use crate::tokenizer::Span;
@ -28,15 +28,16 @@ use super::{
ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate,
InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView,
LimitClause, MatchRecognizePattern, Measure, NamedWindowDefinition, ObjectName, ObjectNamePart,
Offset, OnConflict, OnConflictAction, OnInsert, OrderBy, OrderByExpr, OrderByKind, Partition,
PivotValueSource, ProjectionSelect, Query, RaiseStatement, RaiseStatementValue,
ReferentialAction, RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select,
SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias,
TableAliasColumnDef, TableConstraint, TableFactor, TableObject, TableOptionsClustered,
TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef,
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert,
Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem,
LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList,
NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction,
OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource,
ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction,
RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem,
SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef,
TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins,
UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement,
WildcardAdditionalOptions, With, WithFill,
};
@ -98,10 +99,11 @@ impl Spanned for Query {
order_by,
limit_clause,
fetch,
locks: _, // todo
for_clause: _, // todo, mssql specific
settings: _, // todo, clickhouse specific
format_clause: _, // todo, clickhouse specific
locks: _, // todo
for_clause: _, // todo, mssql specific
settings: _, // todo, clickhouse specific
format_clause: _, // todo, clickhouse specific
pipe_operators: _, // todo bigquery specific
} = self;
union_spans(
@ -310,7 +312,6 @@ impl Spanned for Statement {
table_names,
partitions,
table: _,
only: _,
identity: _,
cascade: _,
on_cluster: _,
@ -338,6 +339,7 @@ impl Spanned for Statement {
} => source.span(),
Statement::Case(stmt) => stmt.span(),
Statement::If(stmt) => stmt.span(),
Statement::While(stmt) => stmt.span(),
Statement::Raise(stmt) => stmt.span(),
Statement::Call(function) => function.span(),
Statement::Copy {
@ -364,6 +366,7 @@ impl Spanned for Statement {
from_query: _,
partition: _,
} => Span::empty(),
Statement::Open(open) => open.span(),
Statement::Close { cursor } => match cursor {
CloseCursor::All => Span::empty(),
CloseCursor::Specific { name } => name.span,
@ -420,6 +423,7 @@ impl Spanned for Statement {
Statement::CreateIndex(create_index) => create_index.span(),
Statement::CreateRole { .. } => Span::empty(),
Statement::CreateSecret { .. } => Span::empty(),
Statement::CreateServer { .. } => Span::empty(),
Statement::CreateConnector { .. } => Span::empty(),
Statement::AlterTable {
name,
@ -428,6 +432,7 @@ impl Spanned for Statement {
operations,
location: _,
on_cluster,
iceberg: _,
} => union_spans(
core::iter::once(name.span())
.chain(operations.iter().map(|i| i.span()))
@ -454,6 +459,7 @@ impl Spanned for Statement {
Statement::DetachDuckDBDatabase { .. } => Span::empty(),
Statement::Drop { .. } => Span::empty(),
Statement::DropFunction { .. } => Span::empty(),
Statement::DropDomain { .. } => Span::empty(),
Statement::DropProcedure { .. } => Span::empty(),
Statement::DropSecret { .. } => Span::empty(),
Statement::Declare { .. } => Span::empty(),
@ -479,6 +485,7 @@ impl Spanned for Statement {
Statement::CreateSchema { .. } => Span::empty(),
Statement::CreateDatabase { .. } => Span::empty(),
Statement::CreateFunction { .. } => Span::empty(),
Statement::CreateDomain { .. } => Span::empty(),
Statement::CreateTrigger { .. } => Span::empty(),
Statement::DropTrigger { .. } => Span::empty(),
Statement::CreateProcedure { .. } => Span::empty(),
@ -486,6 +493,7 @@ impl Spanned for Statement {
Statement::CreateStage { .. } => Span::empty(),
Statement::Assert { .. } => Span::empty(),
Statement::Grant { .. } => Span::empty(),
Statement::Deny { .. } => Span::empty(),
Statement::Revoke { .. } => Span::empty(),
Statement::Deallocate { .. } => Span::empty(),
Statement::Execute { .. } => Span::empty(),
@ -563,27 +571,20 @@ impl Spanned for CreateTable {
constraints,
hive_distribution: _, // hive specific
hive_formats: _, // hive specific
table_properties,
with_options,
file_format: _, // enum
location: _, // string, no span
file_format: _, // enum
location: _, // string, no span
query,
without_rowid: _, // bool
like,
clone,
engine: _, // todo
comment: _, // todo, no span
auto_increment_offset: _, // u32, no span
default_charset: _, // string, no span
collation: _, // string, no span
on_commit: _, // enum
comment: _, // todo, no span
on_commit: _,
on_cluster: _, // todo, clickhouse specific
primary_key: _, // todo, clickhouse specific
order_by: _, // todo, clickhouse specific
partition_by: _, // todo, BigQuery specific
cluster_by: _, // todo, BigQuery specific
clustered_by: _, // todo, Hive specific
options: _, // todo, BigQuery specific
inherits: _, // todo, PostgreSQL specific
strict: _, // bool
copy_grants: _, // bool
@ -599,15 +600,15 @@ impl Spanned for CreateTable {
base_location: _, // todo, Snowflake specific
catalog: _, // todo, Snowflake specific
catalog_sync: _, // todo, Snowflake specific
storage_serialization_policy: _, // todo, Snowflake specific
storage_serialization_policy: _,
table_options,
} = self;
union_spans(
core::iter::once(name.span())
.chain(core::iter::once(table_options.span()))
.chain(columns.iter().map(|i| i.span()))
.chain(constraints.iter().map(|i| i.span()))
.chain(table_properties.iter().map(|i| i.span()))
.chain(with_options.iter().map(|i| i.span()))
.chain(query.iter().map(|i| i.span()))
.chain(like.iter().map(|i| i.span()))
.chain(clone.iter().map(|i| i.span())),
@ -651,7 +652,7 @@ impl Spanned for TableConstraint {
name.iter()
.map(|i| i.span)
.chain(index_name.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span()))
.chain(characteristics.iter().map(|i| i.span())),
),
TableConstraint::PrimaryKey {
@ -665,12 +666,13 @@ impl Spanned for TableConstraint {
name.iter()
.map(|i| i.span)
.chain(index_name.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span()))
.chain(characteristics.iter().map(|i| i.span())),
),
TableConstraint::ForeignKey {
name,
columns,
index_name,
foreign_table,
referred_columns,
on_delete,
@ -679,6 +681,7 @@ impl Spanned for TableConstraint {
} => union_spans(
name.iter()
.map(|i| i.span)
.chain(index_name.iter().map(|i| i.span))
.chain(columns.iter().map(|i| i.span))
.chain(core::iter::once(foreign_table.span()))
.chain(referred_columns.iter().map(|i| i.span))
@ -686,9 +689,11 @@ impl Spanned for TableConstraint {
.chain(on_update.iter().map(|i| i.span()))
.chain(characteristics.iter().map(|i| i.span())),
),
TableConstraint::Check { name, expr } => {
expr.span().union_opt(&name.as_ref().map(|i| i.span))
}
TableConstraint::Check {
name,
expr,
enforced: _,
} => expr.span().union_opt(&name.as_ref().map(|i| i.span)),
TableConstraint::Index {
display_as_key: _,
name,
@ -697,7 +702,7 @@ impl Spanned for TableConstraint {
} => union_spans(
name.iter()
.map(|i| i.span)
.chain(columns.iter().map(|i| i.span)),
.chain(columns.iter().map(|i| i.span())),
),
TableConstraint::FulltextOrSpatial {
fulltext: _,
@ -708,7 +713,7 @@ impl Spanned for TableConstraint {
opt_index_name
.iter()
.map(|i| i.span)
.chain(columns.iter().map(|i| i.span)),
.chain(columns.iter().map(|i| i.span())),
),
}
}
@ -742,6 +747,12 @@ impl Spanned for CreateIndex {
}
}
impl Spanned for IndexColumn {
fn span(&self) -> Span {
self.column.span()
}
}
impl Spanned for CaseStatement {
fn span(&self) -> Span {
let CaseStatement {
@ -774,6 +785,14 @@ impl Spanned for IfStatement {
}
}
impl Spanned for WhileStatement {
fn span(&self) -> Span {
let WhileStatement { while_block } = self;
while_block.span()
}
}
impl Spanned for ConditionalStatements {
fn span(&self) -> Span {
match self {
@ -864,6 +883,7 @@ impl Spanned for ColumnOption {
ColumnOption::OnConflict(..) => Span::empty(),
ColumnOption::Policy(..) => Span::empty(),
ColumnOption::Tags(..) => Span::empty(),
ColumnOption::Srid(..) => Span::empty(),
}
}
}
@ -905,6 +925,7 @@ impl Spanned for AlterColumnOperation {
AlterColumnOperation::SetDataType {
data_type: _,
using,
had_set: _,
} => using.as_ref().map_or(Span::empty(), |u| u.span()),
AlterColumnOperation::AddGenerated { .. } => Span::empty(),
}
@ -972,10 +993,13 @@ impl Spanned for ViewColumnDef {
options,
} = self;
union_spans(
core::iter::once(name.span)
.chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))),
)
name.span.union_opt(&options.as_ref().map(|o| o.span()))
}
}
impl Spanned for ColumnOptions {
fn span(&self) -> Span {
union_spans(self.as_slice().iter().map(|i| i.span()))
}
}
@ -992,6 +1016,14 @@ impl Spanned for SqlOption {
} => union_spans(
core::iter::once(column_name.span).chain(for_values.iter().map(|i| i.span())),
),
SqlOption::TableSpace(_) => Span::empty(),
SqlOption::Comment(_) => Span::empty(),
SqlOption::NamedParenthesizedList(NamedParenthesizedList {
key: name,
name: value,
values,
}) => union_spans(core::iter::once(name.span).chain(values.iter().map(|i| i.span)))
.union_opt(&value.as_ref().map(|i| i.span)),
}
}
}
@ -1028,7 +1060,11 @@ impl Spanned for CreateTableOptions {
match self {
CreateTableOptions::None => Span::empty(),
CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::Options(vec) => {
union_spans(vec.as_slice().iter().map(|i| i.span()))
}
CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
}
}
}
@ -1040,7 +1076,10 @@ impl Spanned for CreateTableOptions {
impl Spanned for AlterTableOperation {
fn span(&self) -> Span {
match self {
AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(),
AlterTableOperation::AddConstraint {
constraint,
not_valid: _,
} => constraint.span(),
AlterTableOperation::AddColumn {
column_keyword: _,
if_not_exists: _,
@ -1072,10 +1111,11 @@ impl Spanned for AlterTableOperation {
drop_behavior: _,
} => name.span,
AlterTableOperation::DropColumn {
column_name,
has_column_keyword: _,
column_names,
if_exists: _,
drop_behavior: _,
} => column_name.span,
} => union_spans(column_names.iter().map(|i| i.span)),
AlterTableOperation::AttachPartition { partition } => partition.span(),
AlterTableOperation::DetachPartition { partition } => partition.span(),
AlterTableOperation::FreezePartition {
@ -1092,6 +1132,7 @@ impl Spanned for AlterTableOperation {
.union_opt(&with_name.as_ref().map(|n| n.span)),
AlterTableOperation::DropPrimaryKey => Span::empty(),
AlterTableOperation::DropForeignKey { name } => name.span,
AlterTableOperation::DropIndex { name } => name.span,
AlterTableOperation::EnableAlwaysRule { name } => name.span,
AlterTableOperation::EnableAlwaysTrigger { name } => name.span,
AlterTableOperation::EnableReplicaRule { name } => name.span,
@ -1158,6 +1199,8 @@ impl Spanned for AlterTableOperation {
AlterTableOperation::Algorithm { .. } => Span::empty(),
AlterTableOperation::AutoIncrement { value, .. } => value.span(),
AlterTableOperation::Lock { .. } => Span::empty(),
AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
AlterTableOperation::ValidateConstraint { name } => name.span,
}
}
}
@ -1373,7 +1416,6 @@ impl Spanned for AssignmentTarget {
/// e.g. `IS NULL <expr>` reports as `<expr>::span`.
///
/// Missing spans:
/// - [Expr::TypedString] # missing span for data_type
/// - [Expr::MatchAgainst] # MySQL specific
/// - [Expr::RLike] # MySQL specific
/// - [Expr::Struct] # BigQuery specific
@ -1545,18 +1587,24 @@ impl Spanned for Expr {
),
Expr::Prefixed { value, .. } => value.span(),
Expr::Case {
case_token,
end_token,
operand,
conditions,
else_result,
} => union_spans(
operand
.as_ref()
.map(|i| i.span())
.into_iter()
.chain(conditions.iter().flat_map(|case_when| {
[case_when.condition.span(), case_when.result.span()]
}))
.chain(else_result.as_ref().map(|i| i.span())),
iter::once(case_token.0.span)
.chain(
operand
.as_ref()
.map(|i| i.span())
.into_iter()
.chain(conditions.iter().flat_map(|case_when| {
[case_when.condition.span(), case_when.result.span()]
}))
.chain(else_result.as_ref().map(|i| i.span())),
)
.chain(iter::once(end_token.0.span)),
),
Expr::Exists { subquery, .. } => subquery.span(),
Expr::Subquery(query) => query.span(),
@ -1576,6 +1624,7 @@ impl Spanned for Expr {
Expr::OuterJoin(expr) => expr.span(),
Expr::Prior(expr) => expr.span(),
Expr::Lambda(_) => Span::empty(),
Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()),
}
}
}
@ -1926,6 +1975,7 @@ impl Spanned for TableFactor {
TableFactor::Unpivot {
table,
value,
null_inclusion: _,
name,
columns,
alias,
@ -2295,6 +2345,13 @@ impl Spanned for BeginEndStatements {
}
}
impl Spanned for OpenStatement {
fn span(&self) -> Span {
let OpenStatement { cursor_name } = self;
cursor_name.span
}
}
#[cfg(test)]
pub mod tests {
use crate::dialect::{Dialect, GenericDialect, SnowflakeDialect};
@ -2434,4 +2491,16 @@ pub mod tests {
assert_eq!(test.get_source(body_span), "SELECT cte.* FROM cte");
}
#[test]
fn test_case_expr_span() {
let dialect = &GenericDialect;
let mut test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END");
let expr = test.0.parse_expr().unwrap();
let expr_span = expr.span();
assert_eq!(
test.get_source(expr_span),
"CASE 1 WHEN 2 THEN 3 ELSE 4 END"
);
}
}


@ -110,6 +110,7 @@ impl fmt::Display for TriggerEvent {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum TriggerPeriod {
For,
After,
Before,
InsteadOf,
@ -118,6 +119,7 @@ pub enum TriggerPeriod {
impl fmt::Display for TriggerPeriod {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TriggerPeriod::For => write!(f, "FOR"),
TriggerPeriod::After => write!(f, "AFTER"),
TriggerPeriod::Before => write!(f, "BEFORE"),
TriggerPeriod::InsteadOf => write!(f, "INSTEAD OF"),


@ -116,7 +116,6 @@ impl From<ValueWithSpan> for Value {
derive(Visit, VisitMut),
visit(with = "visit_value")
)]
pub enum Value {
/// Numeric literal
#[cfg(not(feature = "bigdecimal"))]
@ -456,30 +455,38 @@ impl fmt::Display for EscapeQuotedString<'_> {
// | `"A\"B\"A"` | default | `DoubleQuotedString(String::from("A\"B\"A"))` | `"A""B""A"` |
let quote = self.quote;
let mut previous_char = char::default();
let mut peekable_chars = self.string.chars().peekable();
while let Some(&ch) = peekable_chars.peek() {
let mut start_idx = 0;
let mut peekable_chars = self.string.char_indices().peekable();
while let Some(&(idx, ch)) = peekable_chars.peek() {
match ch {
char if char == quote => {
if previous_char == '\\' {
write!(f, "{char}")?;
// the quote is already escaped with a backslash, skip
peekable_chars.next();
continue;
}
peekable_chars.next();
if peekable_chars.peek().map(|c| *c == quote).unwrap_or(false) {
write!(f, "{char}{char}")?;
peekable_chars.next();
} else {
write!(f, "{char}{char}")?;
match peekable_chars.peek() {
Some((_, c)) if *c == quote => {
// the quote is already escaped with another quote, skip
peekable_chars.next();
}
_ => {
// The quote is not escaped.
// Including idx in the range, so the quote at idx will be printed twice:
// in this call to write_str() and in the next one.
f.write_str(&self.string[start_idx..=idx])?;
start_idx = idx;
}
}
}
_ => {
write!(f, "{ch}")?;
peekable_chars.next();
}
}
previous_char = ch;
}
f.write_str(&self.string[start_idx..])?;
Ok(())
}
}
@ -543,16 +550,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
write!(f, r#"\\"#)?;
}
x if x.is_ascii() => {
write!(f, "{}", c)?;
write!(f, "{c}")?;
}
_ => {
let codepoint = c as u32;
// if the codepoint fits in 16 bits, we can use the \XXXX format
// otherwise, we need to use the \+XXXXXX format
if codepoint <= 0xFFFF {
write!(f, "\\{:04X}", codepoint)?;
write!(f, "\\{codepoint:04X}")?;
} else {
write!(f, "\\+{:06X}", codepoint)?;
write!(f, "\\+{codepoint:06X}")?;
}
}
}


@ -741,7 +741,7 @@ mod tests {
}
}
fn do_visit<V: Visitor>(sql: &str, visitor: &mut V) -> Statement {
fn do_visit<V: Visitor<Break = ()>>(sql: &str, visitor: &mut V) -> Statement {
let dialect = GenericDialect {};
let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();
let s = Parser::new(&dialect)
@ -749,7 +749,8 @@ mod tests {
.parse_statement()
.unwrap();
s.visit(visitor);
let flow = s.visit(visitor);
assert_eq!(flow, ControlFlow::Continue(()));
s
}
@ -925,10 +926,10 @@ mod tests {
#[test]
fn overflow() {
let cond = (0..1000)
.map(|n| format!("X = {}", n))
.map(|n| format!("X = {n}"))
.collect::<Vec<_>>()
.join(" OR ");
let sql = format!("SELECT x where {0}", cond);
let sql = format!("SELECT x where {cond}");
let dialect = GenericDialect {};
let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap();
@ -938,7 +939,8 @@ mod tests {
.unwrap();
let mut visitor = QuickVisitor {};
s.visit(&mut visitor);
let flow = s.visit(&mut visitor);
assert_eq!(flow, ControlFlow::Continue(()));
}
}
@ -969,7 +971,7 @@ mod visit_mut_tests {
}
}
fn do_visit_mut<V: VisitorMut>(sql: &str, visitor: &mut V) -> Statement {
fn do_visit_mut<V: VisitorMut<Break = ()>>(sql: &str, visitor: &mut V) -> Statement {
let dialect = GenericDialect {};
let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();
let mut s = Parser::new(&dialect)
@ -977,7 +979,8 @@ mod visit_mut_tests {
.parse_statement()
.unwrap();
s.visit(visitor);
let flow = s.visit(visitor);
assert_eq!(flow, ControlFlow::Continue(()));
s
}


@ -46,7 +46,11 @@ pub struct BigQueryDialect;
impl Dialect for BigQueryDialect {
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
self.maybe_parse_statement(parser)
if parser.parse_keyword(Keyword::BEGIN) {
return Some(parser.parse_begin_exception_end());
}
None
}
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
@ -136,49 +140,8 @@ impl Dialect for BigQueryDialect {
fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
!RESERVED_FOR_COLUMN_ALIAS.contains(kw)
}
}
impl BigQueryDialect {
fn maybe_parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
if parser.peek_keyword(Keyword::BEGIN) {
return Some(self.parse_begin(parser));
}
None
}
/// Parse a `BEGIN` statement.
/// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
fn parse_begin(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
parser.expect_keyword(Keyword::BEGIN)?;
let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
let has_exception_when_clause = parser.parse_keywords(&[
Keyword::EXCEPTION,
Keyword::WHEN,
Keyword::ERROR,
Keyword::THEN,
]);
let exception_statements = if has_exception_when_clause {
if !parser.peek_keyword(Keyword::END) {
Some(parser.parse_statement_list(&[Keyword::END])?)
} else {
Some(Default::default())
}
} else {
None
};
parser.expect_keyword(Keyword::END)?;
Ok(Statement::StartTransaction {
begin: true,
statements,
exception_statements,
has_end_keyword: true,
transaction: None,
modifier: None,
modes: Default::default(),
})
fn supports_pipe_operator(&self) -> bool {
true
}
}
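With the ad-hoc `maybe_parse_statement`/`parse_begin` pair removed, BigQuery's `BEGIN ... EXCEPTION ... END` handling now goes through the shared `parse_begin_exception_end` parser method. A hedged sketch of the script form this path is meant to keep accepting; the assertion only checks that the whole block parses as a single statement:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```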


@ -52,6 +52,10 @@ impl Dialect for GenericDialect {
true
}
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
fn supports_connect_by(&self) -> bool {
true
}
@ -108,6 +112,14 @@ impl Dialect for GenericDialect {
true
}
fn supports_from_first_select(&self) -> bool {
true
}
fn supports_projection_trailing_commas(&self) -> bool {
true
}
fn supports_asc_desc_in_column_definition(&self) -> bool {
true
}


@ -49,7 +49,7 @@ pub use self::postgresql::PostgreSqlDialect;
pub use self::redshift::RedshiftSqlDialect;
pub use self::snowflake::SnowflakeDialect;
pub use self::sqlite::SQLiteDialect;
use crate::ast::{ColumnOption, Expr, Statement};
use crate::ast::{ColumnOption, Expr, GranteesType, Statement};
pub use crate::keywords;
use crate::keywords::Keyword;
use crate::parser::{Parser, ParserError};
@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
false
}
/// Indicates whether the dialect supports left-associative join parsing
/// by default when parentheses are omitted in nested joins.
///
/// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
/// so a query like:
///
/// ```sql
/// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
/// ```
/// is interpreted as:
/// ```sql
/// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
/// ```
/// and internally represented as a **flat list** of joins.
///
/// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
/// precedence and interpret the same query as:
/// ```sql
/// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
/// ```
/// which results in a **nested join** structure in the AST.
///
/// If this method returns `false`, the parser must build nested join trees
/// even in the absence of parentheses to reflect the correct associativity.
fn supports_left_associative_joins_without_parens(&self) -> bool {
true
}
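A sketch of how a dialect would opt into the Snowflake-style nested-join parsing described above. `RightAssocDialect` is a made-up dialect used only for illustration; the overridden flag is the method added in this change:

```rust
use sqlparser::dialect::Dialect;

#[derive(Debug)]
struct RightAssocDialect;

impl Dialect for RightAssocDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }
    // Ask the parser to build nested join trees when parentheses are omitted.
    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }
}
```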
/// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
fn supports_outer_join_operator(&self) -> bool {
false
@ -518,6 +546,20 @@ pub trait Dialect: Debug + Any {
false
}
/// Return true if the dialect supports the pipe operator.
///
/// Example:
/// ```sql
/// SELECT *
/// FROM table
/// |> limit 1
/// ```
///
/// See <https://cloud.google.com/bigquery/docs/pipe-syntax-guide#basic_syntax>
fn supports_pipe_operator(&self) -> bool {
false
}
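A short sketch of the feature this flag gates, using BigQuery, which enables it earlier in this change set; the assertion only checks that the pipe query parses to a single statement:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM tbl |> LIMIT 1";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```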
/// Does the dialect support MySQL-style `'user'@'host'` grantee syntax?
fn supports_user_host_grantee(&self) -> bool {
false
@ -573,7 +615,7 @@ pub trait Dialect: Debug + Any {
}
let token = parser.peek_token();
debug!("get_next_precedence_full() {:?}", token);
debug!("get_next_precedence_full() {token:?}");
match token.token {
Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)),
Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)),
@ -605,7 +647,9 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
_ => Ok(self.prec_unknown()),
},
Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)),
@ -616,7 +660,9 @@ pub trait Dialect: Debug + Any {
Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)),
Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)),
Token::Period => Ok(p!(Period)),
@ -894,6 +940,11 @@ pub trait Dialect: Debug + Any {
&[]
}
/// Returns grantee types that should be treated as identifiers
fn get_reserved_grantees_types(&self) -> &[GranteesType] {
&[]
}
/// Returns true if this dialect supports the `TABLESAMPLE` option
/// before the table alias option. For example:
///
@ -1007,6 +1058,24 @@ pub trait Dialect: Debug + Any {
fn supports_set_names(&self) -> bool {
false
}
fn supports_space_separated_column_options(&self) -> bool {
false
}
/// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement.
/// Example:
/// ```sql
/// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE <type> USING <exp>
/// ```
fn supports_alter_column_type_using(&self) -> bool {
false
}
/// Returns true if the dialect supports `ALTER TABLE tbl DROP COLUMN c1, ..., cn`
fn supports_comma_separated_drop_column_list(&self) -> bool {
false
}
}
/// This represents the operators for which precedence must be defined


@ -17,7 +17,8 @@
use crate::ast::helpers::attached_token::AttachedToken;
use crate::ast::{
BeginEndStatements, ConditionalStatementBlock, ConditionalStatements, IfStatement, Statement,
BeginEndStatements, ConditionalStatementBlock, ConditionalStatements, GranteesType,
IfStatement, Statement, TriggerObject,
};
use crate::dialect::Dialect;
use crate::keywords::{self, Keyword};
@ -51,6 +52,10 @@ impl Dialect for MsSqlDialect {
|| ch == '_'
}
fn identifier_quote_style(&self, _identifier: &str) -> Option<char> {
Some('[')
}
/// SQL Server has `CONVERT(type, value)` instead of `CONVERT(value, type)`
/// <https://learn.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql?view=sql-server-ver16>
fn convert_type_before_value(&self) -> bool {
@ -118,6 +123,11 @@ impl Dialect for MsSqlDialect {
true
}
/// See <https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/server-level-roles>
fn get_reserved_grantees_types(&self) -> &[GranteesType] {
&[GranteesType::Public]
}
fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
!keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw)
}
@ -125,6 +135,15 @@ impl Dialect for MsSqlDialect {
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
if parser.peek_keyword(Keyword::IF) {
Some(self.parse_if_stmt(parser))
} else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) {
Some(self.parse_create_trigger(parser, false))
} else if parser.parse_keywords(&[
Keyword::CREATE,
Keyword::OR,
Keyword::ALTER,
Keyword::TRIGGER,
]) {
Some(self.parse_create_trigger(parser, true))
} else {
None
}
@ -215,6 +234,42 @@ impl MsSqlDialect {
}))
}
/// Parse `CREATE TRIGGER` for [MsSql]
///
/// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql
fn parse_create_trigger(
&self,
parser: &mut Parser,
or_alter: bool,
) -> Result<Statement, ParserError> {
let name = parser.parse_object_name(false)?;
parser.expect_keyword_is(Keyword::ON)?;
let table_name = parser.parse_object_name(false)?;
let period = parser.parse_trigger_period()?;
let events = parser.parse_comma_separated(Parser::parse_trigger_event)?;
parser.expect_keyword_is(Keyword::AS)?;
let statements = Some(parser.parse_conditional_statements(&[Keyword::END])?);
Ok(Statement::CreateTrigger {
or_alter,
or_replace: false,
is_constraint: false,
name,
period,
events,
table_name,
referenced_table_name: None,
referencing: Vec::new(),
trigger_object: TriggerObject::Statement,
include_each: false,
condition: None,
exec_body: None,
statements,
characteristics: None,
})
}
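For reference, a hedged sketch of the T-SQL shape this branch targets; the trigger name, table, and body are illustrative, and the check only asserts that parsing succeeds:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TRIGGER reminder ON Sales.Customer AFTER INSERT AS SELECT 1";
    let stmts = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```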
/// Parse a sequence of statements, optionally separated by semicolon.
///
/// Stops parsing when reaching EOF or the given keyword.


@ -104,7 +104,7 @@ impl Dialect for PostgreSqlDialect {
fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
let token = parser.peek_token();
debug!("get_next_precedence() {:?}", token);
debug!("get_next_precedence() {token:?}");
// we only return some custom value here when the behaviour (not merely the numeric value) differs
// from the default implementation
@ -258,4 +258,8 @@ impl Dialect for PostgreSqlDialect {
fn supports_set_names(&self) -> bool {
true
}
fn supports_alter_column_type_using(&self) -> bool {
true
}
}


@ -80,13 +80,15 @@ impl Dialect for RedshiftSqlDialect {
}
fn is_identifier_start(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii()
}
fn is_identifier_part(&self, ch: char) -> bool {
// Extends Postgres dialect with sharp
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii()
}
/// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)`


@ -25,8 +25,8 @@ use crate::ast::helpers::stmt_data_loading::{
use crate::ast::{
ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident,
IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, Statement, TagsColumnOption,
WrappedCollection,
IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, SqlOption, Statement,
TagsColumnOption, WrappedCollection,
};
use crate::dialect::{Dialect, Precedence};
use crate::keywords::Keyword;
@ -131,6 +131,10 @@ impl Dialect for SnowflakeDialect {
}
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
if parser.parse_keyword(Keyword::BEGIN) {
return Some(parser.parse_begin_exception_end());
}
if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
// ALTER SESSION
let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
@ -279,6 +283,10 @@ impl Dialect for SnowflakeDialect {
true
}
fn supports_left_associative_joins_without_parens(&self) -> bool {
false
}
fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
// Unreserve some keywords that Snowflake accepts as identifiers
// See: https://docs.snowflake.com/en/sql-reference/reserved-keywords
@ -293,9 +301,8 @@ impl Dialect for SnowflakeDialect {
true
}
fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
explicit
|| match kw {
fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
match kw {
// The following keywords can be considered an alias as long as
// they are not followed by other tokens that may change their meaning
// e.g. `SELECT * EXCEPT (col1) FROM tbl`
@ -352,6 +359,14 @@ impl Dialect for SnowflakeDialect {
fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
&RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
}
fn supports_space_separated_column_options(&self) -> bool {
true
}
fn supports_comma_separated_drop_column_list(&self) -> bool {
true
}
}
fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
@ -417,6 +432,8 @@ pub fn parse_create_table(
// "CREATE TABLE x COPY GRANTS (c INT)" and "CREATE TABLE x (c INT) COPY GRANTS" are both
// accepted by Snowflake
let mut plain_options = vec![];
loop {
let next_token = parser.next_token();
match &next_token.token {
@ -428,7 +445,9 @@ pub fn parse_create_table(
Keyword::COMMENT => {
// Rewind the COMMENT keyword
parser.prev_token();
builder = builder.comment(parser.parse_optional_inline_comment()?);
if let Some(comment_def) = parser.parse_optional_inline_comment()? {
plain_options.push(SqlOption::Comment(comment_def))
}
}
Keyword::AS => {
let query = parser.parse_query()?;
@ -449,7 +468,7 @@ pub fn parse_create_table(
parser.expect_keyword_is(Keyword::BY)?;
parser.expect_token(&Token::LParen)?;
let cluster_by = Some(WrappedCollection::Parentheses(
parser.parse_comma_separated(|p| p.parse_identifier())?,
parser.parse_comma_separated(|p| p.parse_expr())?,
));
parser.expect_token(&Token::RParen)?;
@ -556,6 +575,9 @@ pub fn parse_create_table(
builder.storage_serialization_policy =
Some(parse_storage_serialization_policy(parser)?);
}
Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => {
builder = builder.if_not_exists(true);
}
_ => {
return parser.expected("end of statement", next_token);
}
@ -589,6 +611,13 @@ pub fn parse_create_table(
}
}
}
let table_options = if !plain_options.is_empty() {
crate::ast::CreateTableOptions::Plain(plain_options)
} else {
crate::ast::CreateTableOptions::None
};
builder = builder.table_options(table_options);
if iceberg && builder.base_location.is_none() {
return Err(ParserError::ParserError(
@ -1163,7 +1192,7 @@ fn parse_column_policy_property(
parser: &mut Parser,
with: bool,
) -> Result<ColumnPolicyProperty, ParserError> {
let policy_name = parser.parse_identifier()?;
let policy_name = parser.parse_object_name(false)?;
let using_columns = if parser.parse_keyword(Keyword::USING) {
parser.expect_token(&Token::LParen)?;
let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;


@ -15,7 +15,11 @@
// specific language governing permissions and limitations
// under the License.
use crate::ast::Statement;
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
use crate::ast::BinaryOperator;
use crate::ast::{Expr, Statement};
use crate::dialect::Dialect;
use crate::keywords::Keyword;
use crate::parser::{Parser, ParserError};
@ -70,6 +74,27 @@ impl Dialect for SQLiteDialect {
}
}
fn parse_infix(
&self,
parser: &mut crate::parser::Parser,
expr: &crate::ast::Expr,
_precedence: u8,
) -> Option<Result<crate::ast::Expr, ParserError>> {
// Parse MATCH and REGEXP as operators
// See <https://www.sqlite.org/lang_expr.html#the_like_glob_regexp_match_and_extract_operators>
for (keyword, op) in [
(Keyword::REGEXP, BinaryOperator::Regexp),
(Keyword::MATCH, BinaryOperator::Match),
] {
if parser.parse_keyword(keyword) {
let left = Box::new(expr.clone());
let right = Box::new(parser.parse_expr().unwrap());
return Some(Ok(Expr::BinaryOp { left, op, right }));
}
}
None
}
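A sketch of what the infix hook above produces for a SQLite expression; it assumes the public `Parser::try_with_sql` and `parse_expr` entry points, and only checks the operator variant:

```rust
use sqlparser::ast::{BinaryOperator, Expr};
use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = SQLiteDialect {};
    let mut parser = Parser::new(&dialect)
        .try_with_sql("col1 MATCH 'abc'")
        .unwrap();
    match parser.parse_expr().unwrap() {
        // `col1 MATCH 'abc'` becomes a BinaryOp with the new Match operator.
        Expr::BinaryOp { op, .. } => assert_eq!(op, BinaryOperator::Match),
        other => panic!("unexpected expression: {other:?}"),
    }
}
```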
fn supports_in_empty_list(&self) -> bool {
true
}

src/display_utils.rs (new file, 135 lines)

@ -0,0 +1,135 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Utilities for formatting SQL AST nodes with pretty printing support.
//!
//! The module provides formatters that implement the `Display` trait with support
//! for both regular (`{}`) and pretty (`{:#}`) formatting modes. Pretty printing
//! adds proper indentation and line breaks to make SQL statements more readable.
use core::fmt::{self, Display, Write};
/// A wrapper around a value that adds an indent to the value when displayed with {:#}.
pub(crate) struct Indent<T>(pub T);
const INDENT: &str = " ";
impl<T> Display for Indent<T>
where
T: Display,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
f.write_str(INDENT)?;
write!(Indent(f), "{:#}", self.0)
} else {
self.0.fmt(f)
}
}
}
/// Adds an indent to the inner writer
impl<T> Write for Indent<T>
where
T: Write,
{
fn write_str(&mut self, s: &str) -> fmt::Result {
self.0.write_str(s)?;
// Our NewLine and SpaceOrNewline utils always print individual newlines as a single-character string.
if s == "\n" {
self.0.write_str(INDENT)?;
}
Ok(())
}
}
/// A value that inserts a newline when displayed with {:#}, but not when displayed with {}.
pub(crate) struct NewLine;
impl Display for NewLine {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
f.write_char('\n')
} else {
Ok(())
}
}
}
/// A value that inserts a space when displayed with {}, but a newline when displayed with {:#}.
pub(crate) struct SpaceOrNewline;
impl Display for SpaceOrNewline {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if f.alternate() {
f.write_char('\n')
} else {
f.write_char(' ')
}
}
}
/// A value that displays a comma-separated list of values.
/// When pretty-printed (using {:#}), it displays each value on a new line.
pub(crate) struct DisplayCommaSeparated<'a, T: fmt::Display>(pub(crate) &'a [T]);
impl<T: fmt::Display> fmt::Display for DisplayCommaSeparated<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut first = true;
for t in self.0 {
if !first {
f.write_char(',')?;
SpaceOrNewline.fmt(f)?;
}
first = false;
t.fmt(f)?;
}
Ok(())
}
}
/// Displays a space (or a newline when pretty-printed), followed by a comma-separated list that is indented when pretty-printed.
pub(crate) fn indented_list<T: fmt::Display>(f: &mut fmt::Formatter, items: &[T]) -> fmt::Result {
SpaceOrNewline.fmt(f)?;
Indent(DisplayCommaSeparated(items)).fmt(f)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_indent() {
struct TwoLines;
impl Display for TwoLines {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("line 1")?;
SpaceOrNewline.fmt(f)?;
f.write_str("line 2")
}
}
let indent = Indent(TwoLines);
assert_eq!(
indent.to_string(),
TwoLines.to_string(),
"Only the alternate form should be indented"
);
assert_eq!(format!("{:#}", indent), " line 1\n line 2");
}
}
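A detail worth noting in this new module: helpers are handed the caller's `Formatter` directly (for example `SpaceOrNewline.fmt(f)?` in the display impls above), so the `{:#}` alternate flag propagates, whereas routing the value through `write!(f, "{}", ...)` would reset the flag. A standalone sketch of that pattern with made-up types, outside the crate:

```rust
use std::fmt::{self, Display};

struct SpaceOrNewline;

impl Display for SpaceOrNewline {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.alternate() {
            f.write_str("\n")
        } else {
            f.write_str(" ")
        }
    }
}

struct Clause<'a>(&'a str, &'a str);

impl Display for Clause<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0)?;
        // Calling fmt(f) directly preserves the caller's alternate flag.
        SpaceOrNewline.fmt(f)?;
        f.write_str(self.1)
    }
}

fn main() {
    let c = Clause("WHERE", "a = 1");
    assert_eq!(c.to_string(), "WHERE a = 1");
    assert_eq!(format!("{c:#}"), "WHERE\na = 1");
}
```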


@ -83,6 +83,7 @@ define_keywords!(
ADMIN,
AFTER,
AGAINST,
AGGREGATE,
AGGREGATION,
ALERT,
ALGORITHM,
@ -115,9 +116,11 @@ define_keywords!(
AUTHENTICATION,
AUTHORIZATION,
AUTO,
AUTOEXTEND_SIZE,
AUTOINCREMENT,
AUTO_INCREMENT,
AVG,
AVG_ROW_LENGTH,
AVRO,
BACKWARD,
BASE64,
@ -179,6 +182,7 @@ define_keywords!(
CHARSET,
CHAR_LENGTH,
CHECK,
CHECKSUM,
CIRCLE,
CLEAR,
CLOB,
@ -268,11 +272,13 @@ define_keywords!(
DEFINED,
DEFINER,
DELAYED,
DELAY_KEY_WRITE,
DELETE,
DELIMITED,
DELIMITER,
DELTA,
DENSE_RANK,
DENY,
DEREF,
DESC,
DESCRIBE,
@ -287,6 +293,7 @@ define_keywords!(
DISTRIBUTE,
DIV,
DO,
DOMAIN,
DOUBLE,
DOW,
DOY,
@ -311,6 +318,7 @@ define_keywords!(
END_PARTITION,
ENFORCED,
ENGINE,
ENGINE_ATTRIBUTE,
ENUM,
ENUM16,
ENUM8,
@ -338,6 +346,7 @@ define_keywords!(
EXPLAIN,
EXPLICIT,
EXPORT,
EXTEND,
EXTENDED,
EXTENSION,
EXTERNAL,
@ -386,6 +395,7 @@ define_keywords!(
FUNCTION,
FUNCTIONS,
FUSION,
FUTURE,
GENERAL,
GENERATE,
GENERATED,
@ -441,6 +451,7 @@ define_keywords!(
INPUTFORMAT,
INSENSITIVE,
INSERT,
INSERT_METHOD,
INSTALL,
INSTANT,
INSTEAD,
@ -477,6 +488,7 @@ define_keywords!(
JULIAN,
KEY,
KEYS,
KEY_BLOCK_SIZE,
KILL,
LAG,
LANGUAGE,
@ -530,6 +542,7 @@ define_keywords!(
MAX,
MAXVALUE,
MAX_DATA_EXTENSION_TIME_IN_DAYS,
MAX_ROWS,
MEASURES,
MEDIUMBLOB,
MEDIUMINT,
@ -551,6 +564,7 @@ define_keywords!(
MINUTE,
MINUTES,
MINVALUE,
MIN_ROWS,
MOD,
MODE,
MODIFIES,
@ -633,6 +647,7 @@ define_keywords!(
ORDER,
ORDINALITY,
ORGANIZATION,
OTHER,
OUT,
OUTER,
OUTPUT,
@ -648,6 +663,7 @@ define_keywords!(
OWNERSHIP,
PACKAGE,
PACKAGES,
PACK_KEYS,
PARALLEL,
PARAMETER,
PARQUET,
@ -770,6 +786,7 @@ define_keywords!(
ROW,
ROWID,
ROWS,
ROW_FORMAT,
ROW_NUMBER,
RULE,
RUN,
@ -784,6 +801,7 @@ define_keywords!(
SEARCH,
SECOND,
SECONDARY,
SECONDARY_ENGINE_ATTRIBUTE,
SECONDS,
SECRET,
SECURITY,
@ -798,6 +816,7 @@ define_keywords!(
SERDE,
SERDEPROPERTIES,
SERIALIZABLE,
SERVER,
SERVICE,
SESSION,
SESSION_USER,
@ -828,6 +847,7 @@ define_keywords!(
SQLSTATE,
SQLWARNING,
SQRT,
SRID,
STABLE,
STAGE,
START,
@ -835,12 +855,16 @@ define_keywords!(
STATEMENT,
STATIC,
STATISTICS,
STATS_AUTO_RECALC,
STATS_PERSISTENT,
STATS_SAMPLE_PAGES,
STATUS,
STDDEV_POP,
STDDEV_SAMP,
STDIN,
STDOUT,
STEP,
STORAGE,
STORAGE_INTEGRATION,
STORAGE_SERIALIZATION_POLICY,
STORED,
@ -867,6 +891,7 @@ define_keywords!(
TABLE,
TABLES,
TABLESAMPLE,
TABLESPACE,
TAG,
TARGET,
TASK,
@ -912,6 +937,8 @@ define_keywords!(
TRY,
TRY_CAST,
TRY_CONVERT,
TSQUERY,
TSVECTOR,
TUPLE,
TYPE,
UBIGINT,
@ -955,6 +982,7 @@ define_keywords!(
UUID,
VACUUM,
VALID,
VALIDATE,
VALIDATION_MODE,
VALUE,
VALUES,
@ -982,6 +1010,7 @@ define_keywords!(
WHEN,
WHENEVER,
WHERE,
WHILE,
WIDTH_BUCKET,
WINDOW,
WITH,
@ -989,6 +1018,7 @@ define_keywords!(
WITHOUT,
WITHOUT_ARRAY_WRAPPER,
WORK,
WRAPPER,
WRITE,
XML,
XMLNAMESPACES,
@ -1065,6 +1095,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[
Keyword::SAMPLE,
Keyword::TABLESAMPLE,
Keyword::FROM,
Keyword::OPEN,
];
/// Can't be used as a column alias, so that `SELECT <expr> alias`


@ -64,6 +64,27 @@
//! // The original SQL text can be generated from the AST
//! assert_eq!(ast[0].to_string(), sql);
//! ```
//!
//! # Pretty Printing
//!
//! SQL statements can be pretty-printed with proper indentation and line breaks using the alternate flag (`{:#}`):
//!
//! ```
//! # use sqlparser::dialect::GenericDialect;
//! # use sqlparser::parser::Parser;
//! let sql = "SELECT a, b FROM table_1";
//! let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
//!
//! // Pretty print with indentation and line breaks
//! let pretty_sql = format!("{:#}", ast[0]);
//! assert_eq!(pretty_sql, r#"
//! SELECT
//! a,
//! b
//! FROM
//! table_1
//! "#.trim());
//! ```
//! [sqlparser crates.io page]: https://crates.io/crates/sqlparser
//! [`Parser::parse_sql`]: crate::parser::Parser::parse_sql
//! [`Parser::new`]: crate::parser::Parser::new
@ -128,6 +149,10 @@
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::upper_case_acronyms)]
// Permit large enum variants to keep a unified, expressive AST.
// Splitting complex nodes (expressions, statements, types) into separate types
// would bloat the API and hide intent. Extra memory is a worthwhile tradeoff.
#![allow(clippy::large_enum_variant)]
// Allow proc-macros to find this crate
extern crate self as sqlparser;
@ -142,6 +167,7 @@ extern crate pretty_assertions;
pub mod ast;
#[macro_use]
pub mod dialect;
mod display_utils;
pub mod keywords;
pub mod parser;
pub mod tokenizer;

File diff suppressed because it is too large


@ -151,6 +151,8 @@ impl TestedDialects {
///
/// 2. re-serializing the result of parsing `sql` produces the same
/// `canonical` sql string
///
/// For multiple statements, use [`statements_parse_to`].
pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement {
let mut statements = self.parse_sql_statements(sql).expect(sql);
assert_eq!(statements.len(), 1);
@ -166,6 +168,24 @@ impl TestedDialects {
only_statement
}
/// The same as [`one_statement_parses_to`] but it works for multiple statements
pub fn statements_parse_to(&self, sql: &str, canonical: &str) -> Vec<Statement> {
let statements = self.parse_sql_statements(sql).expect(sql);
if !canonical.is_empty() && sql != canonical {
assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements);
} else {
assert_eq!(
sql,
statements
.iter()
.map(|s| s.to_string())
.collect::<Vec<_>>()
.join("; ")
);
}
statements
}
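A hedged usage sketch for the new `statements_parse_to` helper; the test name and SQL are illustrative, and `all_dialects()` is the existing constructor from this module:

#[test]
fn parse_two_statements_round_trip() {
    // Passing an empty canonical string makes the helper assert that
    // re-serializing the parsed statements reproduces the input SQL.
    let statements = all_dialects().statements_parse_to("SELECT 1; SELECT 2", "");
    assert_eq!(statements.len(), 2);
}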
/// Ensures that `sql` parses as an [`Expr`], and that
/// re-serializing the parse result produces canonical
pub fn expr_parses_to(&self, sql: &str, canonical: &str) -> Expr {
@ -250,7 +270,7 @@ impl TestedDialects {
tokenizer = tokenizer.with_unescape(options.unescape);
}
let tokens = tokenizer.tokenize().unwrap();
assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect);
assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
});
}
}
@ -325,10 +345,12 @@ pub fn alter_table_op_with_name(stmt: Statement, expected_name: &str) -> AlterTa
operations,
on_cluster: _,
location: _,
iceberg,
} => {
assert_eq!(name.to_string(), expected_name);
assert!(!if_exists);
assert!(!is_only);
assert!(!iceberg);
only(operations)
}
_ => panic!("Expected ALTER TABLE statement"),
@ -344,6 +366,11 @@ pub fn number(n: &str) -> Value {
Value::Number(n.parse().unwrap(), false)
}
/// Creates a [Value::SingleQuotedString]
pub fn single_quoted_string(s: impl Into<String>) -> Value {
Value::SingleQuotedString(s.into())
}
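A hedged one-line sketch of the new `single_quoted_string` helper in an expected-AST context, mirroring how the ClickHouse settings tests below use it:

// Builds the Value and wraps it as an expression with an empty span.
let expected = Expr::value(single_quoted_string("hello"));
assert_eq!(expected.to_string(), "'hello'");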
pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
Some(TableAlias {
name: Ident::new(name),
@ -426,3 +453,52 @@ pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
within_group: vec![],
})
}
/// Gets the first index column (mysql calls it a key part) of the first index found in a
/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
pub fn index_column(stmt: Statement) -> Expr {
match stmt {
Statement::CreateIndex(CreateIndex { columns, .. }) => {
columns.first().unwrap().column.expr.clone()
}
Statement::CreateTable(CreateTable { constraints, .. }) => {
match constraints.first().unwrap() {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
AlterTableOperation::AddConstraint { constraint, .. } => {
match constraint {
TableConstraint::Index { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::Unique { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::PrimaryKey { columns, .. } => {
columns.first().unwrap().column.expr.clone()
}
TableConstraint::FulltextOrSpatial {
columns,
..
} => columns.first().unwrap().column.expr.clone(),
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
}
}
_ => panic!("Expected a constraint"),
},
_ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
}
}
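A hedged usage sketch for the new `index_column` helper; the SQL is illustrative and only needs to parse, since no canonical form is asserted here:

// Pull the first key-part expression out of a parsed CREATE INDEX statement.
let stmt = all_dialects().one_statement_parses_to("CREATE INDEX idx ON t(c1)", "");
assert_eq!(index_column(stmt), Expr::Identifier(Ident::new("c1")));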


@ -246,6 +246,8 @@ pub enum Token {
ShiftLeftVerticalBar,
/// `|>>` PostgreSQL/Redshift geometrical binary operator (Is strictly above?)
VerticalBarShiftRight,
/// `|>` BigQuery pipe operator
VerticalBarRightAngleBracket,
/// `#>>`, extracts JSON sub-object at the specified path as text
HashLongArrow,
/// jsonb @> jsonb -> boolean: Test whether left json contains the right json
@ -359,6 +361,7 @@ impl fmt::Display for Token {
Token::AmpersandRightAngleBracket => f.write_str("&>"),
Token::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"),
Token::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"),
Token::VerticalBarRightAngleBracket => f.write_str("|>"),
Token::TwoWayArrow => f.write_str("<->"),
Token::LeftAngleBracketCaret => f.write_str("<^"),
Token::RightAngleBracketCaret => f.write_str(">^"),
@ -1188,6 +1191,22 @@ impl<'a> Tokenizer<'a> {
}
// numbers and period
'0'..='9' | '.' => {
// Special case: if `._` is encountered after a word, that word is a table
// and the `_` begins the column name. If the previous token is not a word,
// then this is neither a valid SQL word nor a number.
if ch == '.' && chars.peekable.clone().nth(1) == Some('_') {
if let Some(Token::Word(_)) = prev_token {
chars.next();
return Ok(Some(Token::Period));
}
return self.tokenizer_error(
chars.location(),
"Unexpected character '_'".to_string(),
);
}
// Some dialects support underscore as number separator
// There can only be one at a time and it must be followed by another digit
let is_number_separator = |ch: char, next_char: Option<char>| {
@ -1403,6 +1422,9 @@ impl<'a> Tokenizer<'a> {
_ => self.start_binop_opt(chars, "|>", None),
}
}
Some('>') if self.dialect.supports_pipe_operator() => {
self.consume_for_binop(chars, "|>", Token::VerticalBarRightAngleBracket)
}
// Bitshift '|' operator
_ => self.start_binop(chars, "|", Token::Pipe),
}
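A hedged sketch of the new token in isolation, assuming `BigQueryDialect` is one of the dialects whose `supports_pipe_operator()` returns true:

let dialect = BigQueryDialect {};
let tokens = Tokenizer::new(&dialect, "FROM t |> SELECT a").tokenize().unwrap();
assert!(tokens.contains(&Token::VerticalBarRightAngleBracket));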
@ -1729,7 +1751,7 @@ impl<'a> Tokenizer<'a> {
(None, Some(tok)) => Ok(Some(tok)),
(None, None) => self.tokenizer_error(
chars.location(),
format!("Expected a valid binary operator after '{}'", prefix),
format!("Expected a valid binary operator after '{prefix}'"),
),
}
}
@ -1787,7 +1809,7 @@ impl<'a> Tokenizer<'a> {
chars.next();
let mut temp = String::new();
let end_delimiter = format!("${}$", value);
let end_delimiter = format!("${value}$");
loop {
match chars.next() {
@ -2380,13 +2402,13 @@ fn take_char_from_hex_digits(
location: chars.location(),
})?;
let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError {
message: format!("Invalid hex digit in escaped unicode string: {}", next_char),
message: format!("Invalid hex digit in escaped unicode string: {next_char}"),
location: chars.location(),
})?;
result = result * 16 + digit;
}
char::from_u32(result).ok_or_else(|| TokenizerError {
message: format!("Invalid unicode character: {:x}", result),
message: format!("Invalid unicode character: {result:x}"),
location: chars.location(),
})
}
@ -3482,7 +3504,7 @@ mod tests {
}
fn check_unescape(s: &str, expected: Option<&str>) {
let s = format!("'{}'", s);
let s = format!("'{s}'");
let mut state = State {
peekable: s.chars().peekable(),
line: 0,
@ -4012,4 +4034,40 @@ mod tests {
],
);
}
#[test]
fn tokenize_period_underscore() {
let sql = String::from("SELECT table._col");
// a dialect that supports underscores in numeric literals
let dialect = PostgreSqlDialect {};
let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap();
let expected = vec![
Token::make_keyword("SELECT"),
Token::Whitespace(Whitespace::Space),
Token::Word(Word {
value: "table".to_string(),
quote_style: None,
keyword: Keyword::TABLE,
}),
Token::Period,
Token::Word(Word {
value: "_col".to_string(),
quote_style: None,
keyword: Keyword::NoKeyword,
}),
];
compare(expected, tokens);
let sql = String::from("SELECT ._123");
if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() {
panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
}
let sql = String::from("SELECT ._abc");
if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() {
panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
}
}
}

tests/pretty_print.rs (new file, 414 lines added)

@ -0,0 +1,414 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
fn prettify(sql: &str) -> String {
let ast = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
format!("{:#}", ast[0])
}
#[test]
fn test_pretty_print_select() {
assert_eq!(
prettify("SELECT a, b, c FROM my_table WHERE x = 1 AND y = 2"),
r#"
SELECT
a,
b,
c
FROM
my_table
WHERE
x = 1 AND y = 2
"#
.trim()
);
}
#[test]
fn test_pretty_print_join() {
assert_eq!(
prettify("SELECT a FROM table1 JOIN table2 ON table1.id = table2.id"),
r#"
SELECT
a
FROM
table1
JOIN table2 ON table1.id = table2.id
"#
.trim()
);
}
#[test]
fn test_pretty_print_subquery() {
assert_eq!(
prettify("SELECT * FROM (SELECT a, b FROM my_table) AS subquery"),
r#"
SELECT
*
FROM
(
SELECT
a,
b
FROM
my_table
) AS subquery
"#
.trim()
);
}
#[test]
fn test_pretty_print_union() {
assert_eq!(
prettify("SELECT a FROM table1 UNION SELECT b FROM table2"),
r#"
SELECT
a
FROM
table1
UNION
SELECT
b
FROM
table2
"#
.trim()
);
}
#[test]
fn test_pretty_print_group_by() {
assert_eq!(
prettify("SELECT a, COUNT(*) FROM my_table GROUP BY a HAVING COUNT(*) > 1"),
r#"
SELECT
a,
COUNT(*)
FROM
my_table
GROUP BY
a
HAVING
COUNT(*) > 1
"#
.trim()
);
}
#[test]
fn test_pretty_print_cte() {
assert_eq!(
prettify("WITH cte AS (SELECT a, b FROM my_table) SELECT * FROM cte"),
r#"
WITH cte AS (
SELECT
a,
b
FROM
my_table
)
SELECT
*
FROM
cte
"#
.trim()
);
}
#[test]
fn test_pretty_print_case_when() {
assert_eq!(
prettify("SELECT CASE WHEN x > 0 THEN 'positive' WHEN x < 0 THEN 'negative' ELSE 'zero' END FROM my_table"),
r#"
SELECT
CASE
WHEN x > 0 THEN
'positive'
WHEN x < 0 THEN
'negative'
ELSE
'zero'
END
FROM
my_table
"#.trim()
);
}
#[test]
fn test_pretty_print_window_function() {
assert_eq!(
prettify("SELECT id, value, ROW_NUMBER() OVER (PARTITION BY category ORDER BY value DESC) as rank FROM my_table"),
r#"
SELECT
id,
value,
ROW_NUMBER() OVER (
PARTITION BY category
ORDER BY value DESC
) AS rank
FROM
my_table
"#.trim()
);
}
#[test]
fn test_pretty_print_multiline_string() {
assert_eq!(
prettify("SELECT 'multiline\nstring' AS str"),
r#"
SELECT
'multiline
string' AS str
"#
.trim(),
"A literal string with a newline should be kept as is. The contents of the string should not be indented."
);
}
#[test]
fn test_pretty_print_insert_values() {
assert_eq!(
prettify("INSERT INTO my_table (a, b, c) VALUES (1, 2, 3), (4, 5, 6)"),
r#"
INSERT INTO my_table (a, b, c)
VALUES
(1, 2, 3),
(4, 5, 6)
"#
.trim()
);
}
#[test]
fn test_pretty_print_insert_select() {
assert_eq!(
prettify("INSERT INTO my_table (a, b) SELECT x, y FROM source_table RETURNING a AS id"),
r#"
INSERT INTO my_table (a, b)
SELECT
x,
y
FROM
source_table
RETURNING
a AS id
"#
.trim()
);
}
#[test]
fn test_pretty_print_update() {
assert_eq!(
prettify("UPDATE my_table SET a = 1, b = 2 WHERE x > 0 RETURNING id, name"),
r#"
UPDATE my_table
SET
a = 1,
b = 2
WHERE
x > 0
RETURNING
id,
name
"#
.trim()
);
}
#[test]
fn test_pretty_print_delete() {
assert_eq!(
prettify("DELETE FROM my_table WHERE x > 0 RETURNING id, name"),
r#"
DELETE FROM
my_table
WHERE
x > 0
RETURNING
id,
name
"#
.trim()
);
assert_eq!(
prettify("DELETE table1, table2"),
r#"
DELETE
table1,
table2
"#
.trim()
);
}
#[test]
fn test_pretty_print_create_table() {
assert_eq!(
prettify("CREATE TABLE my_table (id INT PRIMARY KEY, name VARCHAR(255) NOT NULL, CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id))"),
r#"
CREATE TABLE my_table (
id INT PRIMARY KEY,
name VARCHAR(255) NOT NULL,
CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id)
)
"#
.trim()
);
}
#[test]
fn test_pretty_print_create_view() {
assert_eq!(
prettify("CREATE VIEW my_view AS SELECT a, b FROM my_table WHERE x > 0"),
r#"
CREATE VIEW my_view AS
SELECT
a,
b
FROM
my_table
WHERE
x > 0
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_create_function() {
assert_eq!(
prettify("CREATE FUNCTION my_func() RETURNS INT BEGIN SELECT COUNT(*) INTO @count FROM my_table; RETURN @count; END"),
r#"
CREATE FUNCTION my_func() RETURNS INT
BEGIN
SELECT COUNT(*) INTO @count FROM my_table;
RETURN @count;
END
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_json_table() {
assert_eq!(
prettify("SELECT * FROM JSON_TABLE(@json, '$[*]' COLUMNS (id INT PATH '$.id', name VARCHAR(255) PATH '$.name')) AS jt"),
r#"
SELECT
*
FROM
JSON_TABLE(
@json,
'$[*]' COLUMNS (
id INT PATH '$.id',
name VARCHAR(255) PATH '$.name'
)
) AS jt
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_transaction_blocks() {
assert_eq!(
prettify("BEGIN; UPDATE my_table SET x = 1; COMMIT;"),
r#"
BEGIN;
UPDATE my_table SET x = 1;
COMMIT;
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_control_flow() {
assert_eq!(
prettify("IF x > 0 THEN SELECT 'positive'; ELSE SELECT 'negative'; END IF;"),
r#"
IF x > 0 THEN
SELECT 'positive';
ELSE
SELECT 'negative';
END IF;
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_merge() {
assert_eq!(
prettify("MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN MATCHED THEN UPDATE SET t.value = s.value WHEN NOT MATCHED THEN INSERT (id, value) VALUES (s.id, s.value)"),
r#"
MERGE INTO target_table t
USING source_table s ON t.id = s.id
WHEN MATCHED THEN
UPDATE SET t.value = s.value
WHEN NOT MATCHED THEN
INSERT (id, value) VALUES (s.id, s.value)
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_create_index() {
assert_eq!(
prettify("CREATE INDEX idx_name ON my_table (column1, column2)"),
r#"
CREATE INDEX idx_name
ON my_table (column1, column2)
"#
.trim()
);
}
#[test]
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
fn test_pretty_print_explain() {
assert_eq!(
prettify("EXPLAIN ANALYZE SELECT * FROM my_table WHERE x > 0"),
r#"
EXPLAIN ANALYZE
SELECT
*
FROM
my_table
WHERE
x > 0
"#
.trim()
);
}


@ -261,10 +261,10 @@ fn parse_at_at_identifier() {
#[test]
fn parse_begin() {
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END"#;
let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; RAISE USING MESSAGE = FORMAT('ERR: %s', 'Bad'); END"#;
let Statement::StartTransaction {
statements,
exception_statements,
exception,
has_end_keyword,
..
} = bigquery().verified_stmt(sql)
@ -272,7 +272,10 @@ fn parse_begin() {
unreachable!();
};
assert_eq!(1, statements.len());
assert_eq!(1, exception_statements.unwrap().len());
assert!(exception.is_some());
let exception = exception.unwrap();
assert_eq!(1, exception.len());
assert!(has_end_keyword);
bigquery().verified_stmt(
@ -352,14 +355,16 @@ fn parse_create_view_with_options() {
ViewColumnDef {
name: Ident::new("age"),
data_type: None,
options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue {
key: Ident::new("description"),
value: Expr::Value(
Value::DoubleQuotedString("field age".to_string()).with_span(
Span::new(Location::new(1, 42), Location::new(1, 52))
)
),
}])]),
options: Some(ColumnOptions::CommaSeparated(vec![ColumnOption::Options(
vec![SqlOption::KeyValue {
key: Ident::new("description"),
value: Expr::Value(
Value::DoubleQuotedString("field age".to_string()).with_span(
Span::new(Location::new(1, 42), Location::new(1, 52))
)
),
}]
)])),
},
],
columns
@ -484,7 +489,7 @@ fn parse_create_table_with_options() {
columns,
partition_by,
cluster_by,
options,
table_options,
..
}) => {
assert_eq!(
@ -536,10 +541,10 @@ fn parse_create_table_with_options() {
(
Some(Box::new(Expr::Identifier(Ident::new("_PARTITIONDATE")))),
Some(WrappedCollection::NoWrapping(vec![
Ident::new("userid"),
Ident::new("age"),
Expr::Identifier(Ident::new("userid")),
Expr::Identifier(Ident::new("age")),
])),
Some(vec![
CreateTableOptions::Options(vec![
SqlOption::KeyValue {
key: Ident::new("partition_expiration_days"),
value: Expr::Value(
@ -561,7 +566,7 @@ fn parse_create_table_with_options() {
},
])
),
(partition_by, cluster_by, options)
(partition_by, cluster_by, table_options)
)
}
_ => unreachable!(),
@ -601,11 +606,13 @@ fn parse_nested_data_types() {
field_name: Some("a".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(
Box::new(DataType::Int64,)
))
)),
options: None,
},
StructField {
field_name: Some("b".into()),
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
},
],
StructBracketKind::AngleBrackets
@ -619,6 +626,7 @@ fn parse_nested_data_types() {
vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}],
StructBracketKind::AngleBrackets
),
@ -632,35 +640,6 @@ fn parse_nested_data_types() {
}
}
#[test]
fn parse_invalid_brackets() {
let sql = "SELECT STRUCT<INT64>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
);
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("Expected: (, found: >".to_string())
);
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_string()
)
);
}
#[test]
fn parse_tuple_struct_literal() {
// tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax
@ -771,6 +750,7 @@ fn parse_typed_struct_syntax_bigquery() {
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -799,7 +779,8 @@ fn parse_typed_struct_syntax_bigquery() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident {
@ -807,7 +788,8 @@ fn parse_typed_struct_syntax_bigquery() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
},
]
},
@ -825,17 +807,20 @@ fn parse_typed_struct_syntax_bigquery() {
field_name: Some("arr".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Float64
)))
))),
options: None,
},
StructField {
field_name: Some("str".into()),
field_type: DataType::Struct(
vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}],
StructBracketKind::AngleBrackets
)
),
options: None,
},
]
},
@ -858,13 +843,15 @@ fn parse_typed_struct_syntax_bigquery() {
field_type: DataType::Struct(
Default::default(),
StructBracketKind::AngleBrackets
)
),
options: None,
},
StructField {
field_name: Some("y".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
)))
))),
options: None,
},
]
},
@ -879,7 +866,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -891,7 +879,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -907,7 +896,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Date
field_type: DataType::Date,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -916,11 +906,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Datetime(None)
field_type: DataType::Datetime(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -930,7 +924,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::value(number("5.0"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Float64
field_type: DataType::Float64,
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -940,7 +935,8 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::value(number("1"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[3])
@ -962,7 +958,8 @@ fn parse_typed_struct_syntax_bigquery() {
})],
fields: vec![StructField {
field_name: None,
field_type: DataType::Interval
field_type: DataType::Interval,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -971,13 +968,17 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::JSON
field_type: DataType::JSON,
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -993,7 +994,8 @@ fn parse_typed_struct_syntax_bigquery() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::String(Some(42))
field_type: DataType::String(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1002,11 +1004,17 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
},
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Timestamp(None, TimezoneInfo::None)
field_type: DataType::Timestamp(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1016,11 +1024,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Time(None, TimezoneInfo::None)
field_type: DataType::Time(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1033,11 +1045,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Numeric(ExactNumberInfo::None)
field_type: DataType::Numeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1046,11 +1062,15 @@ fn parse_typed_struct_syntax_bigquery() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::BigNumeric(ExactNumberInfo::None)
field_type: DataType::BigNumeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1067,10 +1087,12 @@ fn parse_typed_struct_syntax_bigquery() {
StructField {
field_name: Some("key".into()),
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some("value".into()),
field_type: DataType::Int64,
options: None,
},
]
},
@ -1092,6 +1114,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1120,7 +1143,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident {
@ -1128,7 +1152,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
quote_style: None,
span: Span::empty(),
}),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
},
]
},
@ -1151,13 +1176,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
field_type: DataType::Struct(
Default::default(),
StructBracketKind::AngleBrackets
)
),
options: None,
},
StructField {
field_name: Some("y".into()),
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
)))
))),
options: None,
},
]
},
@ -1172,7 +1199,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bool
field_type: DataType::Bool,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1184,7 +1212,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Bytes(Some(42))
field_type: DataType::Bytes(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1200,7 +1229,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::Date
field_type: DataType::Date,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1209,11 +1239,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Datetime(None),
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Datetime(None)
field_type: DataType::Datetime(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1223,7 +1257,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::value(number("5.0"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Float64
field_type: DataType::Float64,
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1233,7 +1268,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::value(number("1"))],
fields: vec![StructField {
field_name: None,
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[3])
@ -1255,7 +1291,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
})],
fields: vec![StructField {
field_name: None,
field_type: DataType::Interval
field_type: DataType::Interval,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1264,13 +1301,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::JSON,
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
)
value: ValueWithSpan {
value: Value::SingleQuotedString(
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::JSON
field_type: DataType::JSON,
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1286,7 +1327,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: None,
field_type: DataType::String(Some(42))
field_type: DataType::String(Some(42)),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1295,11 +1337,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
value: ValueWithSpan {
value: Value::SingleQuotedString(
"2008-12-25 15:30:00 America/Los_Angeles".into()
),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Timestamp(None, TimezoneInfo::None)
field_type: DataType::Timestamp(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1309,11 +1357,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Time(None, TimezoneInfo::None),
value: Value::SingleQuotedString("15:30:00".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("15:30:00".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Time(None, TimezoneInfo::None)
field_type: DataType::Time(None, TimezoneInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[2])
@ -1326,11 +1378,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::Numeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::Numeric(ExactNumberInfo::None)
field_type: DataType::Numeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1339,11 +1395,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
&Expr::Struct {
values: vec![Expr::TypedString {
data_type: DataType::BigNumeric(ExactNumberInfo::None),
value: Value::SingleQuotedString("1".into())
value: ValueWithSpan {
value: Value::SingleQuotedString("1".into()),
span: Span::empty(),
}
}],
fields: vec![StructField {
field_name: None,
field_type: DataType::BigNumeric(ExactNumberInfo::None)
field_type: DataType::BigNumeric(ExactNumberInfo::None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1360,7 +1420,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
values: vec![Expr::value(number("5"))],
fields: vec![StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1372,7 +1433,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
)],
fields: vec![StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1387,11 +1449,13 @@ fn parse_typed_struct_with_field_name_bigquery() {
fields: vec![
StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}
]
},
@ -1409,7 +1473,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
values: vec![Expr::value(number("5"))],
fields: vec![StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}]
},
expr_from_projection(&select.projection[0])
@ -1421,7 +1486,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
)],
fields: vec![StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::String(None)
field_type: DataType::String(None),
options: None,
}]
},
expr_from_projection(&select.projection[1])
@ -1436,11 +1502,13 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
fields: vec![
StructField {
field_name: Some(Ident::from("x")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
},
StructField {
field_name: Some(Ident::from("y")),
field_type: DataType::Int64
field_type: DataType::Int64,
options: None,
}
]
},
@ -2313,16 +2381,46 @@ fn bigquery_select_expr_star() {
#[test]
fn test_select_as_struct() {
bigquery().verified_only_select("SELECT * FROM (SELECT AS VALUE STRUCT(123 AS a, false AS b))");
for (sql, parse_to) in [
(
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
),
(
"SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))",
"SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))",
),
(
"SELECT * FROM (SELECT ALL AS STRUCT STRUCT(123 AS a, false AS b))",
"SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))",
),
] {
bigquery().one_statement_parses_to(sql, parse_to);
}
let select = bigquery().verified_only_select("SELECT AS STRUCT 1 AS a, 2 AS b");
assert_eq!(Some(ValueTableMode::AsStruct), select.value_table_mode);
}
#[test]
fn test_select_as_value() {
bigquery().verified_only_select(
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
);
for (sql, parse_to) in [
(
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
),
(
"SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
"SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
),
(
"SELECT * FROM (SELECT ALL AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
"SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))",
),
] {
bigquery().one_statement_parses_to(sql, parse_to);
}
let select = bigquery().verified_only_select("SELECT AS VALUE STRUCT(1 AS a, 2 AS b) AS xyz");
assert_eq!(Some(ValueTableMode::AsValue), select.value_table_mode);
}
@ -2335,7 +2433,10 @@ fn test_triple_quote_typed_strings() {
assert_eq!(
Expr::TypedString {
data_type: DataType::JSON,
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into())
value: ValueWithSpan {
value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()),
span: Span::empty(),
}
},
expr
);
@ -2377,3 +2478,91 @@ fn test_any_type() {
fn test_any_type_dont_break_custom_type() {
bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)");
}
#[test]
fn test_struct_field_options() {
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRUCT<a STRING, b INT64>, ",
"f1 STRUCT<",
"a STRING OPTIONS(description = 'This is a string', type = 'string'), ",
"b INT64",
"> OPTIONS(description = 'This is a struct field')",
")",
));
}
#[test]
fn test_struct_trailing_and_nested_bracket() {
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRING, ",
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRING>>, ",
"f2 STRING",
")",
));
// More complex nested structs
bigquery().verified_stmt(concat!(
"CREATE TABLE my_table (",
"f0 STRING, ",
"f1 STRUCT<a STRING, b STRUCT<c INT64, d STRUCT<e STRING>>>, ",
"f2 STRUCT<h STRING, i STRUCT<j INT64, k STRUCT<l STRUCT<m STRING>>>>, ",
"f3 STRUCT<e STRING, f STRUCT<c INT64>>",
")",
));
// Bad case with missing closing bracket
assert_eq!(
ParserError::ParserError("Expected: >, found: )".to_owned()),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64)")
.unwrap_err()
);
// Bad case with redundant closing bracket
assert_eq!(
ParserError::ParserError(
"unmatched > after parsing data type STRUCT<a STRING, b INT64>)".to_owned()
),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64>>)")
.unwrap_err()
);
// Bad case with redundant closing bracket in nested struct
assert_eq!(
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_owned()
),
bigquery()
.parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRUCT<b INT>>>, c INT64)")
.unwrap_err()
);
let sql = "SELECT STRUCT<INT64>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("unmatched > in STRUCT literal".to_string())
);
let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError("Expected: (, found: >".to_string())
);
let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
assert_eq!(
bigquery_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError::ParserError(
"Expected: ',' or ')' after column definition, found: >".to_string()
)
);
}


@ -28,7 +28,7 @@ use test_utils::*;
use sqlparser::ast::Expr::{BinaryOp, Identifier};
use sqlparser::ast::SelectItem::UnnamedExpr;
use sqlparser::ast::TableFactor::Table;
use sqlparser::ast::Value::Number;
use sqlparser::ast::Value::Boolean;
use sqlparser::ast::*;
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::dialect::GenericDialect;
@ -219,10 +219,14 @@ fn parse_delimited_identifiers() {
#[test]
fn parse_create_table() {
clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY ("x")"#);
clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x""#);
clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY ("x")"#);
clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x""#);
clickhouse().verified_stmt(
r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#,
r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#,
);
clickhouse().one_statement_parses_to(
"CREATE TABLE x (a int) ENGINE = MergeTree() ORDER BY a",
"CREATE TABLE x (a INT) ENGINE = MergeTree ORDER BY a",
);
}
@ -589,7 +593,7 @@ fn parse_clickhouse_data_types() {
#[test]
fn parse_create_table_with_nullable() {
let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE=MergeTree ORDER BY (`k`)"#;
let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE = MergeTree ORDER BY (`k`)"#;
// ClickHouse data type names are case-sensitive, but the canonical representation is not
let canonical_sql = sql.replace("String", "STRING");
@ -669,11 +673,13 @@ fn parse_create_table_with_nested_data_types() {
DataType::Tuple(vec![
StructField {
field_name: None,
field_type: DataType::FixedString(128)
field_type: DataType::FixedString(128),
options: None,
},
StructField {
field_name: None,
field_type: DataType::Int128
field_type: DataType::Int128,
options: None,
}
])
))),
@ -685,12 +691,14 @@ fn parse_create_table_with_nested_data_types() {
StructField {
field_name: Some("a".into()),
field_type: DataType::Datetime64(9, None),
options: None,
},
StructField {
field_name: Some("b".into()),
field_type: DataType::Array(ArrayElemTypeDef::Parenthesis(
Box::new(DataType::Uuid)
))
)),
options: None,
},
]),
options: vec![],
@ -714,14 +722,14 @@ fn parse_create_table_with_nested_data_types() {
fn parse_create_table_with_primary_key() {
match clickhouse_and_generic().verified_stmt(concat!(
r#"CREATE TABLE db.table (`i` INT, `k` INT)"#,
" ENGINE=SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')",
" ENGINE = SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')",
" PRIMARY KEY tuple(i)",
" ORDER BY tuple(i)",
)) {
Statement::CreateTable(CreateTable {
name,
columns,
engine,
table_options,
primary_key,
order_by,
..
@ -742,16 +750,23 @@ fn parse_create_table_with_primary_key() {
],
columns
);
assert_eq!(
engine,
Some(TableEngine {
name: "SharedMergeTree".to_string(),
parameters: Some(vec![
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
NamedParenthesizedList {
key: Ident::new("ENGINE"),
name: Some(Ident::new("SharedMergeTree")),
values: vec![
Ident::with_quote('\'', "/clickhouse/tables/{uuid}/{shard}"),
Ident::with_quote('\'', "{replica}"),
]),
})
);
]
}
)));
fn assert_function(actual: &Function, name: &str, arg: &str) -> bool {
assert_eq!(actual.name, ObjectName::from(vec![Ident::new(name)]));
assert_eq!(
@ -798,7 +813,7 @@ fn parse_create_table_with_variant_default_expressions() {
" b DATETIME EPHEMERAL now(),",
" c DATETIME EPHEMERAL,",
" d STRING ALIAS toString(c)",
") ENGINE=MergeTree"
") ENGINE = MergeTree"
);
match clickhouse_and_generic().verified_stmt(sql) {
Statement::CreateTable(CreateTable { columns, .. }) => {
@ -903,7 +918,7 @@ fn parse_create_view_with_fields_data_types() {
}]),
vec![]
)),
options: None
options: None,
},
ViewColumnDef {
name: "f".into(),
@ -915,7 +930,7 @@ fn parse_create_view_with_fields_data_types() {
}]),
vec![]
)),
options: None
options: None,
},
]
);
@ -954,38 +969,103 @@ fn parse_limit_by() {
#[test]
fn parse_settings_in_query() {
match clickhouse_and_generic()
.verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#)
{
Statement::Query(query) => {
assert_eq!(
query.settings,
Some(vec![
Setting {
key: Ident::new("max_threads"),
value: Number("1".parse().unwrap(), false)
},
Setting {
key: Ident::new("max_block_size"),
value: Number("10000".parse().unwrap(), false)
},
])
);
fn check_settings(sql: &str, expected: Vec<Setting>) {
match clickhouse_and_generic().verified_stmt(sql) {
Statement::Query(q) => {
assert_eq!(q.settings, Some(expected));
}
_ => unreachable!(),
}
_ => unreachable!(),
}
for (sql, expected_settings) in [
(
r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#,
vec![
Setting {
key: Ident::new("max_threads"),
value: Expr::value(number("1")),
},
Setting {
key: Ident::new("max_block_size"),
value: Expr::value(number("10000")),
},
],
),
(
r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#,
vec![Setting {
key: Ident::new("additional_table_filters"),
value: Expr::Dictionary(vec![DictionaryField {
key: Ident::with_quote('\'', "table_1"),
value: Expr::value(single_quoted_string("x != 2")).into(),
}]),
}],
),
(
r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#,
vec![
Setting {
key: Ident::new("additional_result_filter"),
value: Expr::value(single_quoted_string("x != 2")),
},
Setting {
key: Ident::new("query_plan_optimize_lazy_materialization"),
value: Expr::value(Boolean(false)),
},
],
),
] {
check_settings(sql, expected_settings);
}
let invalid_cases = vec![
"SELECT * FROM t SETTINGS a",
"SELECT * FROM t SETTINGS a=",
"SELECT * FROM t SETTINGS a=1, b",
"SELECT * FROM t SETTINGS a=1, b=",
"SELECT * FROM t SETTINGS a=1, b=c",
("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"),
(
"SELECT * FROM t SETTINGS a=",
"Expected: an expression, found: EOF",
),
("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"),
(
"SELECT * FROM t SETTINGS a=1, b=",
"Expected: an expression, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {",
"Expected: identifier, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b'",
"Expected: :, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b': ",
"Expected: an expression, found: EOF",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c',}",
"Expected: identifier, found: }",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}",
"Expected: :, found: }",
),
(
"SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }",
"Expected: an expression, found: }",
),
(
"SELECT * FROM t SETTINGS a = {ANY(b)}",
"Expected: :, found: (",
),
];
for sql in invalid_cases {
clickhouse_and_generic()
.parse_sql_statements(sql)
.expect_err("Expected: SETTINGS key = value, found: ");
for (sql, error_msg) in invalid_cases {
assert_eq!(
clickhouse_and_generic()
.parse_sql_statements(sql)
.unwrap_err(),
ParserError(error_msg.to_string())
);
}
}
#[test]
@ -1334,7 +1414,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
clickhouse().verified_stmt(&format!("USE {}", object_name)),
clickhouse().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -1342,7 +1422,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -1356,7 +1436,7 @@ fn parse_use() {
fn test_query_with_format_clause() {
let format_options = vec!["TabSeparated", "JSONCompact", "NULL"];
for format in &format_options {
let sql = format!("SELECT * FROM t FORMAT {}", format);
let sql = format!("SELECT * FROM t FORMAT {format}");
match clickhouse_and_generic().verified_stmt(&sql) {
Statement::Query(query) => {
if *format == "NULL" {
@ -1539,11 +1619,11 @@ fn parse_select_table_function_settings() {
settings: Some(vec![
Setting {
key: "s0".into(),
value: Value::Number("3".parse().unwrap(), false),
value: Expr::value(number("3")),
},
Setting {
key: "s1".into(),
value: Value::SingleQuotedString("s".into()),
value: Expr::value(single_quoted_string("s")),
},
]),
},
@ -1564,11 +1644,11 @@ fn parse_select_table_function_settings() {
settings: Some(vec![
Setting {
key: "s0".into(),
value: Value::Number("3".parse().unwrap(), false),
value: Expr::value(number("3")),
},
Setting {
key: "s1".into(),
value: Value::SingleQuotedString("s".into()),
value: Expr::value(single_quoted_string("s")),
},
]),
},
@ -1578,7 +1658,6 @@ fn parse_select_table_function_settings() {
"SELECT * FROM t(SETTINGS a=)",
"SELECT * FROM t(SETTINGS a=1, b)",
"SELECT * FROM t(SETTINGS a=1, b=)",
"SELECT * FROM t(SETTINGS a=1, b=c)",
];
for sql in invalid_cases {
clickhouse_and_generic()

File diff suppressed because it is too large.


@ -15,9 +15,11 @@
// specific language governing permissions and limitations
// under the License.
use sqlparser::ast::helpers::attached_token::AttachedToken;
use sqlparser::ast::*;
use sqlparser::dialect::{DatabricksDialect, GenericDialect};
use sqlparser::parser::ParserError;
use sqlparser::tokenizer::Span;
use test_utils::*;
#[macro_use]
@ -108,6 +110,8 @@ fn test_databricks_lambdas() {
Expr::Lambda(LambdaFunction {
params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]),
body: Box::new(Expr::Case {
case_token: AttachedToken::empty(),
end_token: AttachedToken::empty(),
operand: None,
conditions: vec![
CaseWhen {
@ -210,7 +214,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {}", object_name)),
databricks().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -218,7 +222,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -230,21 +234,21 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with keyword and different type of quotes
assert_eq!(
databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)),
databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")),
Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote(
quote,
"my_catalog".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),
@ -325,7 +329,10 @@ fn data_type_timestamp_ntz() {
databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"),
Expr::TypedString {
data_type: DataType::TimestampNtz,
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned())
value: ValueWithSpan {
value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()),
span: Span::empty(),
}
}
);
@ -354,6 +361,6 @@ fn data_type_timestamp_ntz() {
}]
);
}
s => panic!("Unexpected statement: {:?}", s),
s => panic!("Unexpected statement: {s:?}"),
}
}


@ -24,6 +24,7 @@ use test_utils::*;
use sqlparser::ast::*;
use sqlparser::dialect::{DuckDbDialect, GenericDialect};
use sqlparser::parser::ParserError;
fn duckdb() -> TestedDialects {
TestedDialects::new(vec![Box::new(DuckDbDialect {})])
@ -44,10 +45,12 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("i")),
field_type: DataType::Integer(None),
options: None,
},
],
StructBracketKind::Parentheses,
@ -84,6 +87,7 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("v")),
field_type: DataType::Varchar(None),
options: None,
},
StructField {
field_name: Some(Ident::new("s")),
@ -92,14 +96,17 @@ fn test_struct() {
StructField {
field_name: Some(Ident::new("a1")),
field_type: DataType::Integer(None),
options: None,
},
StructField {
field_name: Some(Ident::new("a2")),
field_type: DataType::Varchar(None),
options: None,
},
],
StructBracketKind::Parentheses,
),
options: None,
},
],
StructBracketKind::Parentheses,
@ -362,7 +369,7 @@ fn test_duckdb_specific_int_types() {
("HUGEINT", DataType::HugeInt),
];
for (dtype_string, data_type) in duckdb_dtypes {
let sql = format!("SELECT 123::{}", dtype_string);
let sql = format!("SELECT 123::{dtype_string}");
let select = duckdb().verified_only_select(&sql);
assert_eq!(
&Expr::Cast {
@ -735,19 +742,13 @@ fn test_duckdb_union_datatype() {
storage: Default::default(),
location: Default::default()
}),
table_properties: Default::default(),
with_options: Default::default(),
file_format: Default::default(),
location: Default::default(),
query: Default::default(),
without_rowid: Default::default(),
like: Default::default(),
clone: Default::default(),
engine: Default::default(),
comment: Default::default(),
auto_increment_offset: Default::default(),
default_charset: Default::default(),
collation: Default::default(),
on_commit: Default::default(),
on_cluster: Default::default(),
primary_key: Default::default(),
@ -755,7 +756,6 @@ fn test_duckdb_union_datatype() {
partition_by: Default::default(),
cluster_by: Default::default(),
clustered_by: Default::default(),
options: Default::default(),
inherits: Default::default(),
strict: Default::default(),
copy_grants: Default::default(),
@ -772,6 +772,7 @@ fn test_duckdb_union_datatype() {
catalog: Default::default(),
catalog_sync: Default::default(),
storage_serialization_policy: Default::default(),
table_options: CreateTableOptions::None
}),
stmt
);
@ -792,7 +793,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {}", object_name)),
duckdb().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -800,7 +801,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -812,7 +813,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
duckdb().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
@ -828,3 +831,32 @@ fn parse_use() {
])))
);
}
#[test]
fn test_duckdb_trim() {
let real_sql = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#;
assert_eq!(duckdb().verified_stmt(real_sql).to_string(), real_sql);
let sql_only_select = "SELECT TRIM('xyz', 'a')";
let select = duckdb().verified_only_select(sql_only_select);
assert_eq!(
&Expr::Trim {
expr: Box::new(Expr::Value(
Value::SingleQuotedString("xyz".to_owned()).with_empty_span()
)),
trim_where: None,
trim_what: None,
trim_characters: Some(vec![Expr::Value(
Value::SingleQuotedString("a".to_owned()).with_empty_span()
)]),
},
expr_from_projection(only(&select.projection))
);
// missing comma separation
let error_sql = "SELECT TRIM('xyz' 'a')";
assert_eq!(
ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
duckdb().parse_sql_statements(error_sql).unwrap_err()
);
}


@ -133,9 +133,7 @@ fn create_table_with_comment() {
Statement::CreateTable(CreateTable { comment, .. }) => {
assert_eq!(
comment,
Some(CommentDef::AfterColumnDefsWithoutEq(
"table comment".to_string()
))
Some(CommentDef::WithoutEq("table comment".to_string()))
)
}
_ => unreachable!(),
@ -343,6 +341,9 @@ fn lateral_view() {
fn sort_by() {
let sort_by = "SELECT * FROM db.table SORT BY a";
hive().verified_stmt(sort_by);
let sort_by_with_direction = "SELECT * FROM db.table SORT BY a, b DESC";
hive().verified_stmt(sort_by_with_direction);
}
#[test]
@ -523,7 +524,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}", object_name)),
hive().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -531,7 +532,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),


@ -23,7 +23,8 @@
mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::{Location, Span};
use sqlparser::keywords::Keyword;
use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan, Word};
use test_utils::*;
use sqlparser::ast::DataType::{Int, Text, Varbinary};
@ -99,47 +100,52 @@ fn parse_mssql_delimited_identifiers() {
#[test]
fn parse_create_procedure() {
let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1 END";
let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1; END";
assert_eq!(
ms().verified_stmt(sql),
Statement::CreateProcedure {
or_alter: true,
body: vec![Statement::Query(Box::new(Query {
with: None,
limit_clause: None,
fetch: None,
locks: vec![],
for_clause: None,
order_by: None,
settings: None,
format_clause: None,
body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None,
top: None,
top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Value(
(number("1")).with_empty_span()
))],
into: None,
from: vec![],
lateral_views: vec![],
prewhere: None,
selection: None,
group_by: GroupByExpr::Expressions(vec![], vec![]),
cluster_by: vec![],
distribute_by: vec![],
sort_by: vec![],
having: None,
named_window: vec![],
window_before_qualify: false,
qualify: None,
value_table_mode: None,
connect_by: None,
flavor: SelectFlavor::Standard,
})))
}))],
body: ConditionalStatements::BeginEnd(BeginEndStatements {
begin_token: AttachedToken::empty(),
statements: vec![Statement::Query(Box::new(Query {
with: None,
limit_clause: None,
fetch: None,
locks: vec![],
for_clause: None,
order_by: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None,
top: None,
top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Value(
(number("1")).with_empty_span()
))],
into: None,
from: vec![],
lateral_views: vec![],
prewhere: None,
selection: None,
group_by: GroupByExpr::Expressions(vec![], vec![]),
cluster_by: vec![],
distribute_by: vec![],
sort_by: vec![],
having: None,
named_window: vec![],
window_before_qualify: false,
qualify: None,
value_table_mode: None,
connect_by: None,
flavor: SelectFlavor::Standard,
})))
}))],
end_token: AttachedToken::empty(),
}),
params: Some(vec![
ProcedureParam {
name: Ident {
@ -147,7 +153,8 @@ fn parse_create_procedure() {
quote_style: None,
span: Span::empty(),
},
data_type: DataType::Int(None)
data_type: DataType::Int(None),
mode: None,
},
ProcedureParam {
name: Ident {
@ -158,33 +165,36 @@ fn parse_create_procedure() {
data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 256,
unit: None
}))
})),
mode: None,
}
]),
name: ObjectName::from(vec![Ident {
value: "test".into(),
quote_style: None,
span: Span::empty(),
}])
}]),
language: None,
}
)
}
#[test]
fn parse_mssql_create_procedure() {
let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1 END");
let _ = ms_and_generic().verified_stmt("CREATE PROCEDURE foo AS BEGIN SELECT 1 END");
let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS SELECT 1;");
let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1; END");
let _ = ms_and_generic().verified_stmt("CREATE PROCEDURE foo AS BEGIN SELECT 1; END");
let _ = ms().verified_stmt(
"CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable] END",
"CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable]; END",
);
let _ = ms_and_generic().verified_stmt(
"CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV END",
"CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV; END",
);
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test' END");
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; END");
// Test a statement with END in it
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo] END");
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo]; END");
// Multiple statements
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10 END");
let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10; END");
}
#[test]
@ -222,7 +232,7 @@ fn parse_create_function() {
value: Some(ReturnStatementValue::Expr(Expr::Value(
(number("1")).with_empty_span()
))),
}),],
})],
end_token: AttachedToken::empty(),
})),
behavior: None,
@ -247,6 +257,12 @@ fn parse_create_function() {
";
let _ = ms().verified_stmt(multi_statement_function);
let multi_statement_function_without_as = multi_statement_function.replace(" AS", "");
let _ = ms().one_statement_parses_to(
&multi_statement_function_without_as,
multi_statement_function,
);
let create_function_with_conditional = "\
CREATE FUNCTION some_scalar_udf() \
RETURNS INT \
@ -271,6 +287,137 @@ fn parse_create_function() {
END\
";
let _ = ms().verified_stmt(create_or_alter_function);
let create_function_with_return_expression = "\
CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \
RETURNS INT \
AS \
BEGIN \
RETURN CONVERT(INT, 1) + 2; \
END\
";
let _ = ms().verified_stmt(create_function_with_return_expression);
let create_inline_table_value_function = "\
CREATE FUNCTION some_inline_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS TABLE \
AS \
RETURN (SELECT 1 AS col_1)\
";
let _ = ms().verified_stmt(create_inline_table_value_function);
let create_inline_table_value_function_without_parentheses = "\
CREATE FUNCTION some_inline_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS TABLE \
AS \
RETURN SELECT 1 AS col_1\
";
let _ = ms().verified_stmt(create_inline_table_value_function_without_parentheses);
let create_inline_table_value_function_without_as =
create_inline_table_value_function.replace(" AS", "");
let _ = ms().one_statement_parses_to(
&create_inline_table_value_function_without_as,
create_inline_table_value_function,
);
let create_multi_statement_table_value_function = "\
CREATE FUNCTION some_multi_statement_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS @t TABLE (col_1 INT) \
AS \
BEGIN \
INSERT INTO @t SELECT 1; \
RETURN; \
END\
";
let _ = ms().verified_stmt(create_multi_statement_table_value_function);
let create_multi_statement_table_value_function_without_as =
create_multi_statement_table_value_function.replace(" AS", "");
let _ = ms().one_statement_parses_to(
&create_multi_statement_table_value_function_without_as,
create_multi_statement_table_value_function,
);
let create_multi_statement_table_value_function_with_constraints = "\
CREATE FUNCTION some_multi_statement_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS @t TABLE (col_1 INT NOT NULL) \
AS \
BEGIN \
INSERT INTO @t SELECT 1; \
RETURN @t; \
END\
";
let _ = ms().verified_stmt(create_multi_statement_table_value_function_with_constraints);
let create_multi_statement_tvf_without_table_definition = "\
CREATE FUNCTION incorrect_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS @t TABLE ()
AS \
BEGIN \
INSERT INTO @t SELECT 1; \
RETURN @t; \
END\
";
assert_eq!(
ParserError::ParserError("Unparsable function body".to_owned()),
ms().parse_sql_statements(create_multi_statement_tvf_without_table_definition)
.unwrap_err()
);
let create_inline_tvf_without_subquery_or_bare_select = "\
CREATE FUNCTION incorrect_tvf(@foo INT, @bar VARCHAR(256)) \
RETURNS TABLE
AS \
RETURN 'hi'\
";
assert_eq!(
ParserError::ParserError(
"Expected a subquery (or bare SELECT statement) after RETURN".to_owned()
),
ms().parse_sql_statements(create_inline_tvf_without_subquery_or_bare_select)
.unwrap_err()
);
}
#[test]
fn parse_create_function_parameter_default_values() {
let single_default_sql =
"CREATE FUNCTION test_func(@param1 INT = 42) RETURNS INT AS BEGIN RETURN @param1; END";
assert_eq!(
ms().verified_stmt(single_default_sql),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: false,
temporary: false,
if_not_exists: false,
name: ObjectName::from(vec![Ident::new("test_func")]),
args: Some(vec![OperateFunctionArg {
mode: None,
name: Some(Ident::new("@param1")),
data_type: DataType::Int(None),
default_expr: Some(Expr::Value((number("42")).with_empty_span())),
},]),
return_type: Some(DataType::Int(None)),
function_body: Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
begin_token: AttachedToken::empty(),
statements: vec![Statement::Return(ReturnStatement {
value: Some(ReturnStatementValue::Expr(Expr::Identifier(Ident::new(
"@param1"
)))),
})],
end_token: AttachedToken::empty(),
})),
behavior: None,
called_on_null: None,
parallel: None,
using: None,
language: None,
determinism_specifier: None,
options: None,
remote_connection: None,
}),
);
}
#[test]
@ -1252,6 +1399,7 @@ fn parse_substring_in_select() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
query
);
@ -1354,6 +1502,8 @@ fn parse_mssql_declare() {
order_by: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None,
@ -1393,6 +1543,85 @@ fn parse_mssql_declare() {
let _ = ms().verified_stmt(declare_cursor_for_select);
}
#[test]
fn test_mssql_cursor() {
let full_cursor_usage = "\
DECLARE Employee_Cursor CURSOR FOR \
SELECT LastName, FirstName \
FROM AdventureWorks2022.HumanResources.vEmployee \
WHERE LastName LIKE 'B%'; \
\
OPEN Employee_Cursor; \
\
FETCH NEXT FROM Employee_Cursor; \
\
WHILE @@FETCH_STATUS = 0 \
BEGIN \
FETCH NEXT FROM Employee_Cursor; \
END; \
\
CLOSE Employee_Cursor; \
DEALLOCATE Employee_Cursor\
";
let _ = ms().statements_parse_to(full_cursor_usage, "");
}
#[test]
fn test_mssql_while_statement() {
let while_single_statement = "WHILE 1 = 0 PRINT 'Hello World';";
let stmt = ms().verified_stmt(while_single_statement);
assert_eq!(
stmt,
Statement::While(sqlparser::ast::WhileStatement {
while_block: ConditionalStatementBlock {
start_token: AttachedToken(TokenWithSpan {
token: Token::Word(Word {
value: "WHILE".to_string(),
quote_style: None,
keyword: Keyword::WHILE
}),
span: Span::empty()
}),
condition: Some(Expr::BinaryOp {
left: Box::new(Expr::Value(
(Value::Number("1".parse().unwrap(), false)).with_empty_span()
)),
op: BinaryOperator::Eq,
right: Box::new(Expr::Value(
(Value::Number("0".parse().unwrap(), false)).with_empty_span()
)),
}),
then_token: None,
conditional_statements: ConditionalStatements::Sequence {
statements: vec![Statement::Print(PrintStatement {
message: Box::new(Expr::Value(
(Value::SingleQuotedString("Hello World".to_string()))
.with_empty_span()
)),
})],
}
}
})
);
let while_begin_end = "\
WHILE @@FETCH_STATUS = 0 \
BEGIN \
FETCH NEXT FROM Employee_Cursor; \
END\
";
let _ = ms().verified_stmt(while_begin_end);
let while_begin_end_multiple_statements = "\
WHILE @@FETCH_STATUS = 0 \
BEGIN \
FETCH NEXT FROM Employee_Cursor; \
PRINT 'Hello World'; \
END\
";
let _ = ms().verified_stmt(while_begin_end_multiple_statements);
}
#[test]
fn test_parse_raiserror() {
let sql = r#"RAISERROR('This is a test', 16, 1)"#;
@ -1444,7 +1673,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}", object_name)),
ms().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -1452,7 +1681,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -1641,7 +1870,6 @@ fn parse_create_table_with_valid_options() {
span: Span::empty(),
},
data_type: Int(None,),
options: vec![],
},
ColumnDef {
@ -1651,7 +1879,6 @@ fn parse_create_table_with_valid_options() {
span: Span::empty(),
},
data_type: Int(None,),
options: vec![],
},
],
@ -1663,19 +1890,13 @@ fn parse_create_table_with_valid_options() {
storage: None,
location: None,
},),
table_properties: vec![],
with_options,
file_format: None,
location: None,
query: None,
without_rowid: false,
like: None,
clone: None,
engine: None,
comment: None,
auto_increment_offset: None,
default_charset: None,
collation: None,
on_commit: None,
on_cluster: None,
primary_key: None,
@ -1683,7 +1904,6 @@ fn parse_create_table_with_valid_options() {
partition_by: None,
cluster_by: None,
clustered_by: None,
options: None,
inherits: None,
strict: false,
iceberg: false,
@ -1701,6 +1921,7 @@ fn parse_create_table_with_valid_options() {
catalog: None,
catalog_sync: None,
storage_serialization_policy: None,
table_options: CreateTableOptions::With(with_options)
})
);
}
@ -1834,19 +2055,13 @@ fn parse_create_table_with_identity_column() {
storage: None,
location: None,
},),
table_properties: vec![],
with_options: vec![],
file_format: None,
location: None,
query: None,
without_rowid: false,
like: None,
clone: None,
engine: None,
comment: None,
auto_increment_offset: None,
default_charset: None,
collation: None,
on_commit: None,
on_cluster: None,
primary_key: None,
@ -1854,7 +2069,6 @@ fn parse_create_table_with_identity_column() {
partition_by: None,
cluster_by: None,
clustered_by: None,
options: None,
inherits: None,
strict: false,
copy_grants: false,
@ -1871,6 +2085,7 @@ fn parse_create_table_with_identity_column() {
catalog: None,
catalog_sync: None,
storage_serialization_policy: None,
table_options: CreateTableOptions::None
}),
);
}
@ -1972,7 +2187,7 @@ fn parse_mssql_if_else() {
"IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"
);
}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}
@ -2022,7 +2237,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 21), Location::new(1, 36))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Blocks
@ -2043,7 +2258,7 @@ fn test_mssql_if_statements_span() {
Span::new(Location::new(1, 32), Location::new(1, 57))
);
}
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}
@ -2129,6 +2344,101 @@ fn parse_mssql_merge_with_output() {
ms_and_generic().verified_stmt(stmt);
}
#[test]
fn parse_create_trigger() {
let create_trigger = "\
CREATE OR ALTER TRIGGER reminder1 \
ON Sales.Customer \
AFTER INSERT, UPDATE \
AS RAISERROR('Notify Customer Relations', 16, 10);\
";
let create_stmt = ms().verified_stmt(create_trigger);
assert_eq!(
create_stmt,
Statement::CreateTrigger {
or_alter: true,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("reminder1")]),
period: TriggerPeriod::After,
events: vec![TriggerEvent::Insert, TriggerEvent::Update(vec![]),],
table_name: ObjectName::from(vec![Ident::new("Sales"), Ident::new("Customer")]),
referenced_table_name: None,
referencing: vec![],
trigger_object: TriggerObject::Statement,
include_each: false,
condition: None,
exec_body: None,
statements: Some(ConditionalStatements::Sequence {
statements: vec![Statement::RaisError {
message: Box::new(Expr::Value(
(Value::SingleQuotedString("Notify Customer Relations".to_string()))
.with_empty_span()
)),
severity: Box::new(Expr::Value(
(Value::Number("16".parse().unwrap(), false)).with_empty_span()
)),
state: Box::new(Expr::Value(
(Value::Number("10".parse().unwrap(), false)).with_empty_span()
)),
arguments: vec![],
options: vec![],
}],
}),
characteristics: None,
}
);
let multi_statement_as_trigger = "\
CREATE TRIGGER some_trigger ON some_table FOR INSERT \
AS \
DECLARE @var INT; \
RAISERROR('Trigger fired', 10, 1);\
";
let _ = ms().verified_stmt(multi_statement_as_trigger);
let multi_statement_trigger = "\
CREATE TRIGGER some_trigger ON some_table FOR INSERT \
AS \
BEGIN \
DECLARE @var INT; \
RAISERROR('Trigger fired', 10, 1); \
END\
";
let _ = ms().verified_stmt(multi_statement_trigger);
let create_trigger_with_return = "\
CREATE TRIGGER some_trigger ON some_table FOR INSERT \
AS \
BEGIN \
RETURN; \
END\
";
let _ = ms().verified_stmt(create_trigger_with_return);
let create_trigger_with_return = "\
CREATE TRIGGER some_trigger ON some_table FOR INSERT \
AS \
BEGIN \
RETURN; \
END\
";
let _ = ms().verified_stmt(create_trigger_with_return);
let create_trigger_with_conditional = "\
CREATE TRIGGER some_trigger ON some_table FOR INSERT \
AS \
BEGIN \
IF 1 = 2 \
BEGIN \
RAISERROR('Trigger fired', 10, 1); \
END; \
RETURN; \
END\
";
let _ = ms().verified_stmt(create_trigger_with_conditional);
}
#[test]
fn parse_drop_trigger() {
let sql_drop_trigger = "DROP TRIGGER emp_stamp;";
@ -2160,3 +2470,13 @@ fn parse_print() {
let _ = ms().verified_stmt("PRINT N'Hello, ⛄️!'");
let _ = ms().verified_stmt("PRINT @my_variable");
}
#[test]
fn parse_mssql_grant() {
ms().verified_stmt("GRANT SELECT ON my_table TO public, db_admin");
}
#[test]
fn parse_mssql_deny() {
ms().verified_stmt("DENY SELECT ON my_table TO public, db_admin");
}


@ -593,7 +593,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
mysql_and_generic().verified_stmt(&format!("USE {}", object_name)),
mysql_and_generic().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -601,8 +601,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
mysql_and_generic()
.verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -670,6 +669,20 @@ fn table_constraint_unique_primary_ctor(
characteristics: Option<ConstraintCharacteristics>,
unique_index_type_display: Option<KeyOrIndexDisplay>,
) -> TableConstraint {
let columns = columns
.into_iter()
.map(|ident| IndexColumn {
column: OrderByExpr {
expr: Expr::Identifier(ident),
options: OrderByOptions {
asc: None,
nulls_first: None,
},
with_fill: None,
},
operator_class: None,
})
.collect();
match unique_index_type_display {
Some(index_type_display) => TableConstraint::Unique {
name,
@ -795,6 +808,67 @@ fn parse_create_table_primary_and_unique_key_with_index_options() {
}
}
#[test]
fn parse_prefix_key_part() {
let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value(
number("10"),
)))];
for sql in [
"CREATE INDEX idx_index ON t(textcol(10))",
"ALTER TABLE tab ADD INDEX idx_index (textcol(10))",
"ALTER TABLE tab ADD PRIMARY KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
"ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))",
"CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))",
] {
match index_column(mysql_and_generic().verified_stmt(sql)) {
Expr::Function(Function {
name,
args: FunctionArguments::List(FunctionArgumentList { args, .. }),
..
}) => {
assert_eq!(name.to_string(), "textcol");
assert_eq!(args, expected);
}
expr => panic!("unexpected expression {expr} for {sql}"),
}
}
}
#[test]
fn test_functional_key_part() {
assert_eq!(
index_column(
mysql_and_generic()
.verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)")
),
Expr::Nested(Box::new(Expr::Collate {
expr: Box::new(Expr::Identifier("col".into())),
collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier(
Ident::new("utf8mb4_bin")
)]),
}))
);
assert_eq!(
index_column(mysql_and_generic().verified_stmt(
r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"#
)),
Expr::Nested(Box::new(Expr::Cast {
kind: CastKind::Cast,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("col"))),
op: BinaryOperator::LongArrow,
right: Box::new(Expr::Value(
Value::SingleQuotedString("$.id".to_string()).with_empty_span()
)),
}),
data_type: DataType::Unsigned,
format: None,
})),
);
}
#[test]
fn parse_create_table_primary_and_unique_key_with_index_type() {
let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {
@ -848,9 +922,23 @@ fn parse_create_table_comment() {
for sql in [without_equal, with_equal] {
match mysql().verified_stmt(sql) {
Statement::CreateTable(CreateTable { name, comment, .. }) => {
Statement::CreateTable(CreateTable {
name,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
assert_eq!(comment.expect("Should exist").to_string(), "baz");
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
let comment = match plain_options.first().unwrap() {
SqlOption::Comment(CommentDef::WithEq(c))
| SqlOption::Comment(CommentDef::WithoutEq(c)) => c,
_ => unreachable!(),
};
assert_eq!(comment, "baz");
}
_ => unreachable!(),
}
@ -859,29 +947,226 @@ fn parse_create_table_comment() {
#[test]
fn parse_create_table_auto_increment_offset() {
let canonical =
"CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT 123";
let with_equal =
"CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=123";
let sql =
"CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123";
for sql in [canonical, with_equal] {
match mysql().one_statement_parses_to(sql, canonical) {
match mysql().verified_stmt(sql) {
Statement::CreateTable(CreateTable {
name,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("AUTO_INCREMENT"),
value: Expr::Value(test_utils::number("123").with_empty_span())
}));
}
_ => unreachable!(),
}
}
#[test]
fn parse_create_table_multiple_options_order_independent() {
let sql1 = "CREATE TABLE mytable (id INT) ENGINE=InnoDB ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc'";
let sql2 = "CREATE TABLE mytable (id INT) KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB ROW_FORMAT=DYNAMIC";
let sql3 = "CREATE TABLE mytable (id INT) ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB";
for sql in [sql1, sql2, sql3] {
match mysql().parse_sql_statements(sql).unwrap().pop().unwrap() {
Statement::CreateTable(CreateTable {
name,
auto_increment_offset,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
assert_eq!(
auto_increment_offset.expect("Should exist").to_string(),
"123"
);
assert_eq!(name.to_string(), "mytable");
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
NamedParenthesizedList {
key: Ident::new("ENGINE"),
name: Some(Ident::new("InnoDB")),
values: vec![]
}
)));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("KEY_BLOCK_SIZE"),
value: Expr::Value(test_utils::number("8").with_empty_span())
}));
assert!(plain_options
.contains(&SqlOption::Comment(CommentDef::WithEq("abc".to_owned()))));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("ROW_FORMAT"),
value: Expr::Identifier(Ident::new("DYNAMIC".to_owned()))
}));
}
_ => unreachable!(),
}
}
}
#[test]
fn parse_create_table_with_all_table_options() {
let sql =
"CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123 DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci INSERT_METHOD = FIRST KEY_BLOCK_SIZE = 8 ROW_FORMAT = DYNAMIC DATA DIRECTORY = '/var/lib/mysql/data' INDEX DIRECTORY = '/var/lib/mysql/index' PACK_KEYS = 1 STATS_AUTO_RECALC = 1 STATS_PERSISTENT = 0 STATS_SAMPLE_PAGES = 128 DELAY_KEY_WRITE = 1 COMPRESSION = 'ZLIB' ENCRYPTION = 'Y' MAX_ROWS = 10000 MIN_ROWS = 10 AUTOEXTEND_SIZE = 64 AVG_ROW_LENGTH = 128 CHECKSUM = 1 CONNECTION = 'mysql://localhost' ENGINE_ATTRIBUTE = 'primary' PASSWORD = 'secure_password' SECONDARY_ENGINE_ATTRIBUTE = 'secondary_attr' START TRANSACTION TABLESPACE my_tablespace STORAGE DISK UNION = (table1, table2, table3)";
match mysql().verified_stmt(sql) {
Statement::CreateTable(CreateTable {
name,
table_options,
..
}) => {
assert_eq!(name, vec![Ident::new("foo".to_owned())].into());
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
NamedParenthesizedList {
key: Ident::new("ENGINE"),
name: Some(Ident::new("InnoDB")),
values: vec![]
}
)));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("COLLATE"),
value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("DEFAULT CHARSET"),
value: Expr::Identifier(Ident::new("utf8mb4".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("AUTO_INCREMENT"),
value: Expr::value(test_utils::number("123"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("KEY_BLOCK_SIZE"),
value: Expr::value(test_utils::number("8"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("ROW_FORMAT"),
value: Expr::Identifier(Ident::new("DYNAMIC".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("PACK_KEYS"),
value: Expr::value(test_utils::number("1"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("STATS_AUTO_RECALC"),
value: Expr::value(test_utils::number("1"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("STATS_PERSISTENT"),
value: Expr::value(test_utils::number("0"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("STATS_SAMPLE_PAGES"),
value: Expr::value(test_utils::number("128"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("STATS_SAMPLE_PAGES"),
value: Expr::value(test_utils::number("128"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("INSERT_METHOD"),
value: Expr::Identifier(Ident::new("FIRST".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("COMPRESSION"),
value: Expr::value(Value::SingleQuotedString("ZLIB".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("ENCRYPTION"),
value: Expr::value(Value::SingleQuotedString("Y".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("MAX_ROWS"),
value: Expr::value(test_utils::number("10000"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("MIN_ROWS"),
value: Expr::value(test_utils::number("10"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("AUTOEXTEND_SIZE"),
value: Expr::value(test_utils::number("64"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("AVG_ROW_LENGTH"),
value: Expr::value(test_utils::number("128"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("CHECKSUM"),
value: Expr::value(test_utils::number("1"))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("CONNECTION"),
value: Expr::value(Value::SingleQuotedString("mysql://localhost".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("ENGINE_ATTRIBUTE"),
value: Expr::value(Value::SingleQuotedString("primary".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("PASSWORD"),
value: Expr::value(Value::SingleQuotedString("secure_password".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("SECONDARY_ENGINE_ATTRIBUTE"),
value: Expr::value(Value::SingleQuotedString("secondary_attr".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::Ident(Ident::new(
"START TRANSACTION".to_owned()
))));
assert!(
plain_options.contains(&SqlOption::TableSpace(TablespaceOption {
name: "my_tablespace".to_string(),
storage: Some(StorageType::Disk),
}))
);
assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
NamedParenthesizedList {
key: Ident::new("UNION"),
name: None,
values: vec![
Ident::new("table1".to_string()),
Ident::new("table2".to_string()),
Ident::new("table3".to_string())
]
}
)));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("DATA DIRECTORY"),
value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/data".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("INDEX DIRECTORY"),
value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/index".to_owned()))
}));
}
_ => unreachable!(),
}
}
#[test]
fn parse_create_table_set_enum() {
let sql = "CREATE TABLE foo (bar SET('a', 'b'), baz ENUM('a', 'b'))";
@ -916,13 +1201,12 @@ fn parse_create_table_set_enum() {
#[test]
fn parse_create_table_engine_default_charset() {
let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3";
let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3";
match mysql().verified_stmt(sql) {
Statement::CreateTable(CreateTable {
name,
columns,
engine,
default_charset,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
@ -934,14 +1218,24 @@ fn parse_create_table_engine_default_charset() {
},],
columns
);
assert_eq!(
engine,
Some(TableEngine {
name: "InnoDB".to_string(),
parameters: None
})
);
assert_eq!(default_charset, Some("utf8mb3".to_string()));
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("DEFAULT CHARSET"),
value: Expr::Identifier(Ident::new("utf8mb3".to_owned()))
}));
assert!(plain_options.contains(&SqlOption::NamedParenthesizedList(
NamedParenthesizedList {
key: Ident::new("ENGINE"),
name: Some(Ident::new("InnoDB")),
values: vec![]
}
)));
}
_ => unreachable!(),
}
@ -949,12 +1243,12 @@ fn parse_create_table_engine_default_charset() {
#[test]
fn parse_create_table_collate() {
let sql = "CREATE TABLE foo (id INT(11)) COLLATE=utf8mb4_0900_ai_ci";
let sql = "CREATE TABLE foo (id INT(11)) COLLATE = utf8mb4_0900_ai_ci";
match mysql().verified_stmt(sql) {
Statement::CreateTable(CreateTable {
name,
columns,
collation,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
@ -966,7 +1260,16 @@ fn parse_create_table_collate() {
},],
columns
);
assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string()));
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("COLLATE"),
value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned()))
}));
}
_ => unreachable!(),
}
@ -974,16 +1277,26 @@ fn parse_create_table_collate() {
#[test]
fn parse_create_table_both_options_and_as_query() {
let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb4_0900_ai_ci AS SELECT 1";
let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3 COLLATE = utf8mb4_0900_ai_ci AS SELECT 1";
match mysql_and_generic().verified_stmt(sql) {
Statement::CreateTable(CreateTable {
name,
collation,
query,
table_options,
..
}) => {
assert_eq!(name.to_string(), "foo");
assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string()));
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
assert!(plain_options.contains(&SqlOption::KeyValue {
key: Ident::new("COLLATE"),
value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned()))
}));
assert_eq!(
query.unwrap().body.as_select().unwrap().projection,
vec![SelectItem::UnnamedExpr(Expr::Value(
@ -994,7 +1307,8 @@ fn parse_create_table_both_options_and_as_query() {
_ => unreachable!(),
}
let sql = r"CREATE TABLE foo (id INT(11)) ENGINE=InnoDB AS SELECT 1 DEFAULT CHARSET=utf8mb3";
let sql =
r"CREATE TABLE foo (id INT(11)) ENGINE = InnoDB AS SELECT 1 DEFAULT CHARSET = utf8mb3";
assert!(matches!(
mysql_and_generic().parse_sql_statements(sql),
Err(ParserError::ParserError(_))
@ -1113,6 +1427,7 @@ fn parse_escaped_quote_identifiers_with_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1165,6 +1480,7 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1211,6 +1527,7 @@ fn parse_escaped_backticks_with_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1261,6 +1578,7 @@ fn parse_escaped_backticks_with_no_escape() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
);
}
@ -1436,6 +1754,7 @@ fn parse_simple_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1484,6 +1803,7 @@ fn parse_ignore_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1532,6 +1852,7 @@ fn parse_priority_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1577,6 +1898,7 @@ fn parse_priority_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1624,6 +1946,7 @@ fn parse_insert_as() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1686,6 +2009,7 @@ fn parse_insert_as() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1735,6 +2059,7 @@ fn parse_replace_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1775,6 +2100,7 @@ fn parse_empty_row_insert() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1839,6 +2165,7 @@ fn parse_insert_with_on_duplicate_update() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
source
);
@ -1935,11 +2262,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 2: Qualified column name that starts with digits and on its own represents a number.
@ -1949,11 +2276,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 3: Unqualified, the same token is parsed as a number.
@ -1967,11 +2294,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
assert_eq!(&number("15e29"), value);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 4: Quoted simple identifier.
@ -1981,11 +2308,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
assert_eq!(&Ident::with_quote('`', "15e29"), name);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 5: Quoted compound identifier.
@ -1998,11 +2325,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 6: Multi-level compound identifiers.
@ -2019,11 +2346,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
// Case 7: Multi-level compound quoted identifiers.
@ -2040,11 +2367,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
&parts[..]
);
}
proj => panic!("Unexpected projection: {:?}", proj),
proj => panic!("Unexpected projection: {proj:?}"),
},
body => panic!("Unexpected statement body: {:?}", body),
body => panic!("Unexpected statement body: {body:?}"),
},
stmt => panic!("Unexpected statement: {:?}", stmt),
stmt => panic!("Unexpected statement: {stmt:?}"),
}
}
@ -2254,11 +2581,13 @@ fn parse_alter_table_add_column() {
if_exists,
only,
operations,
iceberg,
location: _,
on_cluster: _,
} => {
assert_eq!(name.to_string(), "tab");
assert!(!if_exists);
assert!(!iceberg);
assert!(!only);
assert_eq!(
operations,
@ -2283,8 +2612,7 @@ fn parse_alter_table_add_column() {
if_exists,
only,
operations,
location: _,
on_cluster: _,
..
} => {
assert_eq!(name.to_string(), "tab");
assert!(!if_exists);
@ -2321,8 +2649,7 @@ fn parse_alter_table_add_columns() {
if_exists,
only,
operations,
location: _,
on_cluster: _,
..
} => {
assert_eq!(name.to_string(), "tab");
assert!(!if_exists);
@ -2548,7 +2875,8 @@ fn parse_alter_table_with_algorithm() {
operations,
vec![
AlterTableOperation::DropColumn {
column_name: Ident::new("password_digest"),
has_column_keyword: true,
column_names: vec![Ident::new("password_digest")],
if_exists: false,
drop_behavior: None,
},
@ -2595,7 +2923,8 @@ fn parse_alter_table_with_lock() {
operations,
vec![
AlterTableOperation::DropColumn {
column_name: Ident::new("password_digest"),
has_column_keyword: true,
column_names: vec![Ident::new("password_digest")],
if_exists: false,
drop_behavior: None,
},
@ -2745,6 +3074,7 @@ fn parse_substring_in_select() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}),
query
);
@ -3051,6 +3381,7 @@ fn parse_hex_string_introducer() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
}))
)
}
@ -3283,7 +3614,9 @@ fn parse_grant() {
objects,
grantees,
with_grant_option,
as_grantor: _,
granted_by,
current_grants: _,
} = stmt
{
assert_eq!(
@ -3489,6 +3822,11 @@ fn parse_begin_without_transaction() {
mysql().verified_stmt("BEGIN");
}
#[test]
fn parse_geometric_types_srid_option() {
mysql_and_generic().verified_stmt("CREATE TABLE t (a geometry SRID 4326)");
}
#[test]
fn parse_double_precision() {
mysql().verified_stmt("CREATE TABLE foo (bar DOUBLE)");
@ -3524,6 +3862,7 @@ fn parse_create_trigger() {
assert_eq!(
create_stmt,
Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("emp_stamp")]),
@ -3535,13 +3874,14 @@ fn parse_create_trigger() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("emp_stamp")]),
args: None,
}
},
}),
statements: None,
characteristics: None,
}
);
@ -3722,3 +4062,76 @@ fn parse_straight_join() {
mysql()
.verified_stmt("SELECT a.*, b.* FROM table_a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id");
}
#[test]
fn mysql_foreign_key_with_index_name() {
mysql().verified_stmt(
"CREATE TABLE orders (customer_id INT, INDEX idx_customer (customer_id), CONSTRAINT fk_customer FOREIGN KEY idx_customer (customer_id) REFERENCES customers(id))",
);
}
#[test]
fn parse_drop_index() {
let sql = "DROP INDEX idx_name ON table_name";
match mysql().verified_stmt(sql) {
Statement::Drop {
object_type,
if_exists,
names,
cascade,
restrict,
purge,
temporary,
table,
} => {
assert!(!if_exists);
assert_eq!(ObjectType::Index, object_type);
assert_eq!(
vec!["idx_name"],
names.iter().map(ToString::to_string).collect::<Vec<_>>()
);
assert!(!cascade);
assert!(!restrict);
assert!(!purge);
assert!(!temporary);
assert!(table.is_some());
assert_eq!("table_name", table.unwrap().to_string());
}
_ => unreachable!(),
}
}
#[test]
fn parse_alter_table_drop_index() {
assert_matches!(
alter_table_op(
mysql_and_generic().verified_stmt("ALTER TABLE tab DROP INDEX idx_index")
),
AlterTableOperation::DropIndex { name } if name.value == "idx_index"
);
}
#[test]
fn parse_json_member_of() {
mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#);
let sql = r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#;
let stmt = mysql().verified_stmt(sql);
match stmt {
Statement::Query(query) => {
let select = query.body.as_select().unwrap();
assert_eq!(
select.projection,
vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf {
value: Box::new(Expr::Value(
Value::SingleQuotedString("ab".to_string()).into()
)),
array: Box::new(Expr::Value(
Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string())
.into()
)),
}))]
);
}
_ => panic!("Unexpected statement {stmt}"),
}
}


@ -21,6 +21,7 @@
#[macro_use]
mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::Span;
use test_utils::*;
@ -348,7 +349,7 @@ fn parse_create_table_with_defaults() {
name,
columns,
constraints,
with_options,
table_options,
if_not_exists: false,
external: false,
file_format: None,
@ -485,6 +486,11 @@ fn parse_create_table_with_defaults() {
]
);
assert!(constraints.is_empty());
let with_options = match table_options {
CreateTableOptions::With(options) => options,
_ => unreachable!(),
};
assert_eq!(
with_options,
vec![
@ -600,9 +606,10 @@ fn parse_alter_table_constraints_unique_nulls_distinct() {
.verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)")
{
Statement::AlterTable { operations, .. } => match &operations[0] {
AlterTableOperation::AddConstraint(TableConstraint::Unique {
nulls_distinct, ..
}) => {
AlterTableOperation::AddConstraint {
constraint: TableConstraint::Unique { nulls_distinct, .. },
..
} => {
assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct)
}
_ => unreachable!(),
@ -758,10 +765,7 @@ fn parse_drop_extension() {
#[test]
fn parse_alter_table_alter_column() {
pg().one_statement_parses_to(
"ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'",
"ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'",
);
pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'");
match alter_table_op(
pg().verified_stmt(
@ -777,6 +781,7 @@ fn parse_alter_table_alter_column() {
AlterColumnOperation::SetDataType {
data_type: DataType::Text,
using: Some(using_expr),
had_set: true,
}
);
}
@ -828,8 +833,7 @@ fn parse_alter_table_add_columns() {
if_exists,
only,
operations,
location: _,
on_cluster: _,
..
} => {
assert_eq!(name.to_string(), "tab");
assert!(if_exists);
@ -909,8 +913,7 @@ fn parse_alter_table_owner_to() {
if_exists: _,
only: _,
operations,
location: _,
on_cluster: _,
..
} => {
assert_eq!(name.to_string(), "tab");
assert_eq!(
@ -1326,6 +1329,7 @@ fn parse_copy_to() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
to: true,
target: CopyTarget::File {
@ -2530,12 +2534,12 @@ fn parse_create_indices_with_operator_classes() {
for expected_operator_class in &operator_classes {
let single_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);
let multi_column_sql_statement = format!(
"CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})",
expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
.unwrap_or_default()
);
@ -2994,6 +2998,7 @@ fn parse_array_subquery_expr() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
filter: None,
null_treatment: None,
@ -3267,7 +3272,7 @@ fn test_fn_arg_with_value_operator() {
assert!(matches!(
&args[..],
&[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }]
), "Invalid function argument: {:?}", args);
), "Invalid function argument: {args:?}");
}
other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"),
}
@ -4098,6 +4103,216 @@ fn parse_update_in_with_subquery() {
pg_and_generic().verified_stmt(r#"WITH "result" AS (UPDATE "Hero" SET "name" = 'Captain America', "number_of_movies" = "number_of_movies" + 1 WHERE "secret_identity" = 'Sam Wilson' RETURNING "id", "name", "secret_identity", "number_of_movies") SELECT * FROM "result""#);
}
#[test]
fn parser_create_function_with_args() {
let sql1 = r#"CREATE OR REPLACE FUNCTION check_strings_different(str1 VARCHAR, str2 VARCHAR) RETURNS BOOLEAN LANGUAGE plpgsql AS $$
BEGIN
IF str1 <> str2 THEN
RETURN TRUE;
ELSE
RETURN FALSE;
END IF;
END;
$$"#;
assert_eq!(
pg_and_generic().verified_stmt(sql1),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: true,
temporary: false,
name: ObjectName::from(vec![Ident::new("check_strings_different")]),
args: Some(vec![
OperateFunctionArg::with_name(
"str1",
DataType::Varchar(None),
),
OperateFunctionArg::with_name(
"str2",
DataType::Varchar(None),
),
]),
return_type: Some(DataType::Boolean),
language: Some("plpgsql".into()),
behavior: None,
called_on_null: None,
parallel: None,
function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value(
(Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF str1 <> str2 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span()
))),
if_not_exists: false,
using: None,
determinism_specifier: None,
options: None,
remote_connection: None,
})
);
let sql2 = r#"CREATE OR REPLACE FUNCTION check_not_zero(int1 INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$
BEGIN
IF int1 <> 0 THEN
RETURN TRUE;
ELSE
RETURN FALSE;
END IF;
END;
$$"#;
assert_eq!(
pg_and_generic().verified_stmt(sql2),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: true,
temporary: false,
name: ObjectName::from(vec![Ident::new("check_not_zero")]),
args: Some(vec![
OperateFunctionArg::with_name(
"int1",
DataType::Int(None)
)
]),
return_type: Some(DataType::Boolean),
language: Some("plpgsql".into()),
behavior: None,
called_on_null: None,
parallel: None,
function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value(
(Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF int1 <> 0 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span()
))),
if_not_exists: false,
using: None,
determinism_specifier: None,
options: None,
remote_connection: None,
})
);
let sql3 = r#"CREATE OR REPLACE FUNCTION check_values_different(a INT, b INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$
BEGIN
IF a <> b THEN
RETURN TRUE;
ELSE
RETURN FALSE;
END IF;
END;
$$"#;
assert_eq!(
pg_and_generic().verified_stmt(sql3),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: true,
temporary: false,
name: ObjectName::from(vec![Ident::new("check_values_different")]),
args: Some(vec![
OperateFunctionArg::with_name(
"a",
DataType::Int(None)
),
OperateFunctionArg::with_name(
"b",
DataType::Int(None)
),
]),
return_type: Some(DataType::Boolean),
language: Some("plpgsql".into()),
behavior: None,
called_on_null: None,
parallel: None,
function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value(
(Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF a <> b THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span()
))),
if_not_exists: false,
using: None,
determinism_specifier: None,
options: None,
remote_connection: None,
})
);
let sql4 = r#"CREATE OR REPLACE FUNCTION check_values_different(int1 INT, int2 INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$
BEGIN
IF int1 <> int2 THEN
RETURN TRUE;
ELSE
RETURN FALSE;
END IF;
END;
$$"#;
assert_eq!(
pg_and_generic().verified_stmt(sql4),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: true,
temporary: false,
name: ObjectName::from(vec![Ident::new("check_values_different")]),
args: Some(vec![
OperateFunctionArg::with_name(
"int1",
DataType::Int(None)
),
OperateFunctionArg::with_name(
"int2",
DataType::Int(None)
),
]),
return_type: Some(DataType::Boolean),
language: Some("plpgsql".into()),
behavior: None,
called_on_null: None,
parallel: None,
function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value(
(Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF int1 <> int2 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span()
))),
if_not_exists: false,
using: None,
determinism_specifier: None,
options: None,
remote_connection: None,
})
);
let sql5 = r#"CREATE OR REPLACE FUNCTION foo(a TIMESTAMP WITH TIME ZONE, b VARCHAR) RETURNS BOOLEAN LANGUAGE plpgsql AS $$
BEGIN
RETURN TRUE;
END;
$$"#;
assert_eq!(
pg_and_generic().verified_stmt(sql5),
Statement::CreateFunction(CreateFunction {
or_alter: false,
or_replace: true,
temporary: false,
name: ObjectName::from(vec![Ident::new("foo")]),
args: Some(vec![
OperateFunctionArg::with_name(
"a",
DataType::Timestamp(None, TimezoneInfo::WithTimeZone)
),
OperateFunctionArg::with_name("b", DataType::Varchar(None)),
]),
return_type: Some(DataType::Boolean),
language: Some("plpgsql".into()),
behavior: None,
called_on_null: None,
parallel: None,
function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value(
(Value::DollarQuotedString(DollarQuotedString {
value: "\n BEGIN\n RETURN TRUE;\n END;\n ".to_owned(),
tag: None
}))
.with_empty_span()
))),
if_not_exists: false,
using: None,
determinism_specifier: None,
options: None,
remote_connection: None,
})
);
let incorrect_sql = "CREATE FUNCTION add(function(struct<a,b> int64), b INTEGER) RETURNS INTEGER LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE AS 'select $1 + $2;'";
assert!(pg().parse_sql_statements(incorrect_sql).is_err(),);
}
#[test]
fn parse_create_function() {
let sql = "CREATE FUNCTION add(INTEGER, INTEGER) RETURNS INTEGER LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE AS 'select $1 + $2;'";
@ -4239,6 +4454,66 @@ fn parse_drop_function() {
);
}
#[test]
fn parse_drop_domain() {
let sql = "DROP DOMAIN IF EXISTS jpeg_domain";
assert_eq!(
pg().verified_stmt(sql),
Statement::DropDomain(DropDomain {
if_exists: true,
name: ObjectName::from(vec![Ident {
value: "jpeg_domain".to_string(),
quote_style: None,
span: Span::empty(),
}]),
drop_behavior: None
})
);
let sql = "DROP DOMAIN jpeg_domain";
assert_eq!(
pg().verified_stmt(sql),
Statement::DropDomain(DropDomain {
if_exists: false,
name: ObjectName::from(vec![Ident {
value: "jpeg_domain".to_string(),
quote_style: None,
span: Span::empty(),
}]),
drop_behavior: None
})
);
let sql = "DROP DOMAIN IF EXISTS jpeg_domain CASCADE";
assert_eq!(
pg().verified_stmt(sql),
Statement::DropDomain(DropDomain {
if_exists: true,
name: ObjectName::from(vec![Ident {
value: "jpeg_domain".to_string(),
quote_style: None,
span: Span::empty(),
}]),
drop_behavior: Some(DropBehavior::Cascade)
})
);
let sql = "DROP DOMAIN IF EXISTS jpeg_domain RESTRICT";
assert_eq!(
pg().verified_stmt(sql),
Statement::DropDomain(DropDomain {
if_exists: true,
name: ObjectName::from(vec![Ident {
value: "jpeg_domain".to_string(),
quote_style: None,
span: Span::empty(),
}]),
drop_behavior: Some(DropBehavior::Restrict)
})
);
}
#[test]
fn parse_drop_procedure() {
let sql = "DROP PROCEDURE IF EXISTS test_proc";
@ -4512,13 +4787,13 @@ fn parse_truncate() {
let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]);
let table_names = vec![TruncateTableTarget {
name: table_name.clone(),
only: false,
}];
assert_eq!(
Statement::Truncate {
table_names,
partitions: None,
table: false,
only: false,
identity: None,
cascade: None,
on_cluster: None,
@ -4535,6 +4810,7 @@ fn parse_truncate_with_options() {
let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]);
let table_names = vec![TruncateTableTarget {
name: table_name.clone(),
only: true,
}];
assert_eq!(
@ -4542,7 +4818,6 @@ fn parse_truncate_with_options() {
table_names,
partitions: None,
table: true,
only: true,
identity: Some(TruncateIdentityOption::Restart),
cascade: Some(CascadeOption::Cascade),
on_cluster: None,
@ -4563,9 +4838,11 @@ fn parse_truncate_with_table_list() {
let table_names = vec![
TruncateTableTarget {
name: table_name_a.clone(),
only: false,
},
TruncateTableTarget {
name: table_name_b.clone(),
only: false,
},
];
@ -4574,7 +4851,6 @@ fn parse_truncate_with_table_list() {
table_names,
partitions: None,
table: true,
only: false,
identity: Some(TruncateIdentityOption::Restart),
cascade: Some(CascadeOption::Cascade),
on_cluster: None,
@ -4606,7 +4882,6 @@ fn parse_create_table_with_alias() {
name,
columns,
constraints,
with_options: _with_options,
if_not_exists: false,
external: false,
file_format: None,
@ -4785,6 +5060,7 @@ fn test_simple_postgres_insert_with_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,
@ -4856,6 +5132,7 @@ fn test_simple_postgres_insert_with_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,
@ -4925,6 +5202,7 @@ fn test_simple_insert_with_quoted_alias() {
for_clause: None,
settings: None,
format_clause: None,
pipe_operators: vec![],
})),
assignments: vec![],
partitioned: None,
@ -4979,7 +5257,10 @@ fn parse_at_time_zone() {
left: Box::new(Expr::AtTimeZone {
timestamp: Box::new(Expr::TypedString {
data_type: DataType::Timestamp(None, TimezoneInfo::None),
value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
value: ValueWithSpan {
value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
span: Span::empty(),
},
}),
time_zone: Box::new(Expr::Cast {
kind: CastKind::DoubleColon,
@ -5013,7 +5294,11 @@ fn parse_at_time_zone() {
fn parse_create_table_with_options() {
let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)";
match pg().verified_stmt(sql) {
Statement::CreateTable(CreateTable { with_options, .. }) => {
Statement::CreateTable(CreateTable { table_options, .. }) => {
let with_options = match table_options {
CreateTableOptions::With(options) => options,
_ => unreachable!(),
};
assert_eq!(
vec![
SqlOption::KeyValue {
@ -5080,10 +5365,109 @@ fn test_escaped_string_literal() {
}
}
#[test]
fn parse_create_domain() {
let sql1 = "CREATE DOMAIN my_domain AS INTEGER CHECK (VALUE > 0)";
let expected = Statement::CreateDomain(CreateDomain {
name: ObjectName::from(vec![Ident::new("my_domain")]),
data_type: DataType::Integer(None),
collation: None,
default: None,
constraints: vec![TableConstraint::Check {
name: None,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("VALUE"))),
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});
assert_eq!(pg().verified_stmt(sql1), expected);
let sql2 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" CHECK (VALUE > 0)";
let expected = Statement::CreateDomain(CreateDomain {
name: ObjectName::from(vec![Ident::new("my_domain")]),
data_type: DataType::Integer(None),
collation: Some(Ident::with_quote('"', "en_US")),
default: None,
constraints: vec![TableConstraint::Check {
name: None,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("VALUE"))),
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});
assert_eq!(pg().verified_stmt(sql2), expected);
let sql3 = "CREATE DOMAIN my_domain AS INTEGER DEFAULT 1 CHECK (VALUE > 0)";
let expected = Statement::CreateDomain(CreateDomain {
name: ObjectName::from(vec![Ident::new("my_domain")]),
data_type: DataType::Integer(None),
collation: None,
default: Some(Expr::Value(test_utils::number("1").into())),
constraints: vec![TableConstraint::Check {
name: None,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("VALUE"))),
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});
assert_eq!(pg().verified_stmt(sql3), expected);
let sql4 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" DEFAULT 1 CHECK (VALUE > 0)";
let expected = Statement::CreateDomain(CreateDomain {
name: ObjectName::from(vec![Ident::new("my_domain")]),
data_type: DataType::Integer(None),
collation: Some(Ident::with_quote('"', "en_US")),
default: Some(Expr::Value(test_utils::number("1").into())),
constraints: vec![TableConstraint::Check {
name: None,
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("VALUE"))),
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});
assert_eq!(pg().verified_stmt(sql4), expected);
let sql5 = "CREATE DOMAIN my_domain AS INTEGER CONSTRAINT my_constraint CHECK (VALUE > 0)";
let expected = Statement::CreateDomain(CreateDomain {
name: ObjectName::from(vec![Ident::new("my_domain")]),
data_type: DataType::Integer(None),
collation: None,
default: None,
constraints: vec![TableConstraint::Check {
name: Some(Ident::new("my_constraint")),
expr: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident::new("VALUE"))),
op: BinaryOperator::Gt,
right: Box::new(Expr::Value(test_utils::number("0").into())),
}),
enforced: None,
}],
});
assert_eq!(pg().verified_stmt(sql5), expected);
}
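// Editor's sketch (not part of the diff above): how the new CREATE DOMAIN support could be
// exercised through the public sqlparser API, assuming the Statement::CreateDomain variant
// from this change is available. A hedged illustration, not the project's test code.
fn create_domain_roundtrip_sketch() {
    use sqlparser::dialect::PostgreSqlDialect;
    use sqlparser::parser::Parser;

    let sql = "CREATE DOMAIN my_domain AS INTEGER CHECK (VALUE > 0)";
    // Parse with the Postgres dialect; the test above asserts the same round-trip.
    let stmts = Parser::parse_sql_statements(&PostgreSqlDialect {}, sql).unwrap();
    assert_eq!(stmts[0].to_string(), sql);
}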
#[test]
fn parse_create_simple_before_insert_trigger() {
let sql = "CREATE TRIGGER check_insert BEFORE INSERT ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_insert";
let expected = Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("check_insert")]),
@ -5095,13 +5479,14 @@ fn parse_create_simple_before_insert_trigger() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("check_account_insert")]),
args: None,
},
},
}),
statements: None,
characteristics: None,
};
@ -5112,6 +5497,7 @@ fn parse_create_simple_before_insert_trigger() {
fn parse_create_after_update_trigger_with_condition() {
let sql = "CREATE TRIGGER check_update AFTER UPDATE ON accounts FOR EACH ROW WHEN (NEW.balance > 10000) EXECUTE FUNCTION check_account_update";
let expected = Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("check_update")]),
@ -5130,13 +5516,14 @@ fn parse_create_after_update_trigger_with_condition() {
op: BinaryOperator::Gt,
right: Box::new(Expr::value(number("10000"))),
}))),
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("check_account_update")]),
args: None,
},
},
}),
statements: None,
characteristics: None,
};
@ -5147,6 +5534,7 @@ fn parse_create_after_update_trigger_with_condition() {
fn parse_create_instead_of_delete_trigger() {
let sql = "CREATE TRIGGER check_delete INSTEAD OF DELETE ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_deletes";
let expected = Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("check_delete")]),
@ -5158,13 +5546,14 @@ fn parse_create_instead_of_delete_trigger() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("check_account_deletes")]),
args: None,
},
},
}),
statements: None,
characteristics: None,
};
@ -5175,6 +5564,7 @@ fn parse_create_instead_of_delete_trigger() {
fn parse_create_trigger_with_multiple_events_and_deferrable() {
let sql = "CREATE CONSTRAINT TRIGGER check_multiple_events BEFORE INSERT OR UPDATE OR DELETE ON accounts DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE FUNCTION check_account_changes";
let expected = Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: true,
name: ObjectName::from(vec![Ident::new("check_multiple_events")]),
@ -5190,13 +5580,14 @@ fn parse_create_trigger_with_multiple_events_and_deferrable() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("check_account_changes")]),
args: None,
},
},
}),
statements: None,
characteristics: Some(ConstraintCharacteristics {
deferrable: Some(true),
initially: Some(DeferrableInitial::Deferred),
@ -5211,6 +5602,7 @@ fn parse_create_trigger_with_multiple_events_and_deferrable() {
fn parse_create_trigger_with_referencing() {
let sql = "CREATE TRIGGER check_referencing BEFORE INSERT ON accounts REFERENCING NEW TABLE AS new_accounts OLD TABLE AS old_accounts FOR EACH ROW EXECUTE FUNCTION check_account_referencing";
let expected = Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("check_referencing")]),
@ -5233,13 +5625,14 @@ fn parse_create_trigger_with_referencing() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("check_account_referencing")]),
args: None,
},
},
}),
statements: None,
characteristics: None,
};
@ -5259,7 +5652,7 @@ fn parse_create_trigger_invalid_cases() {
),
(
"CREATE TRIGGER check_update TOMORROW UPDATE ON accounts EXECUTE FUNCTION check_account_update",
"Expected: one of BEFORE or AFTER or INSTEAD, found: TOMORROW"
"Expected: one of FOR or BEFORE or AFTER or INSTEAD, found: TOMORROW"
),
(
"CREATE TRIGGER check_update BEFORE SAVE ON accounts EXECUTE FUNCTION check_account_update",
@ -5288,7 +5681,7 @@ fn parse_drop_trigger() {
"DROP TRIGGER{} check_update ON table_name{}",
if if_exists { " IF EXISTS" } else { "" },
option
.map(|o| format!(" {}", o))
.map(|o| format!(" {o}"))
.unwrap_or_else(|| "".to_string())
);
assert_eq!(
@ -5382,8 +5775,7 @@ fn parse_trigger_related_functions() {
// Now we parse the statements and check if they are parsed correctly.
let mut statements = pg()
.parse_sql_statements(&format!(
"{}{}{}{}",
sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger
"{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}"
))
.unwrap();
@ -5441,19 +5833,13 @@ fn parse_trigger_related_functions() {
storage: None,
location: None
}),
table_properties: vec![],
with_options: vec![],
file_format: None,
location: None,
query: None,
without_rowid: false,
like: None,
clone: None,
engine: None,
comment: None,
auto_increment_offset: None,
default_charset: None,
collation: None,
on_commit: None,
on_cluster: None,
primary_key: None,
@ -5461,7 +5847,6 @@ fn parse_trigger_related_functions() {
partition_by: None,
cluster_by: None,
clustered_by: None,
options: None,
inherits: None,
strict: false,
copy_grants: false,
@ -5478,6 +5863,7 @@ fn parse_trigger_related_functions() {
catalog: None,
catalog_sync: None,
storage_serialization_policy: None,
table_options: CreateTableOptions::None
}
);
@ -5523,6 +5909,7 @@ fn parse_trigger_related_functions() {
assert_eq!(
create_trigger,
Statement::CreateTrigger {
or_alter: false,
or_replace: false,
is_constraint: false,
name: ObjectName::from(vec![Ident::new("emp_stamp")]),
@ -5534,13 +5921,14 @@ fn parse_trigger_related_functions() {
trigger_object: TriggerObject::Row,
include_each: true,
condition: None,
exec_body: TriggerExecBody {
exec_body: Some(TriggerExecBody {
exec_type: TriggerExecBodyType::Function,
func_desc: FunctionDesc {
name: ObjectName::from(vec![Ident::new("emp_stamp")]),
args: None,
}
},
}),
statements: None,
characteristics: None
}
);
@ -5787,3 +6175,180 @@ fn parse_varbit_datatype() {
_ => unreachable!(),
}
}
#[test]
fn parse_alter_table_replica_identity() {
match pg_and_generic().verified_stmt("ALTER TABLE foo REPLICA IDENTITY FULL") {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::ReplicaIdentity {
identity: ReplicaIdentity::Full
}]
);
}
_ => unreachable!(),
}
match pg_and_generic().verified_stmt("ALTER TABLE foo REPLICA IDENTITY USING INDEX foo_idx") {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::ReplicaIdentity {
identity: ReplicaIdentity::Index("foo_idx".into())
}]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_ts_datatypes() {
match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsVector,
options: vec![],
}]
);
}
_ => unreachable!(),
}
match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
columns,
vec![ColumnDef {
name: "x".into(),
data_type: DataType::TsQuery,
options: vec![],
}]
);
}
_ => unreachable!(),
}
}
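// Editor's sketch (not part of the diff): pulling the parsed column type back out of the AST
// for the new TSVECTOR support, assuming the DataType::TsVector variant from this change.
// Illustrative only; field names follow the CreateTable struct matched in the test above.
fn tsvector_column_type_sketch() {
    use sqlparser::ast::{DataType, Statement};
    use sqlparser::dialect::PostgreSqlDialect;
    use sqlparser::parser::Parser;

    let stmts =
        Parser::parse_sql_statements(&PostgreSqlDialect {}, "CREATE TABLE foo (x TSVECTOR)")
            .unwrap();
    if let Statement::CreateTable(create) = &stmts[0] {
        // The single column should carry the dedicated TSVECTOR data type.
        assert_eq!(create.columns[0].data_type, DataType::TsVector);
    }
}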
#[test]
fn parse_alter_table_constraint_not_valid() {
match pg_and_generic().verified_stmt(
"ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID",
) {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::AddConstraint {
constraint: TableConstraint::ForeignKey {
name: Some("bar".into()),
index_name: None,
columns: vec!["baz".into()],
foreign_table: ObjectName::from(vec!["other".into()]),
referred_columns: vec!["ref".into()],
on_delete: None,
on_update: None,
characteristics: None,
},
not_valid: true,
}]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_alter_table_validate_constraint() {
match pg_and_generic().verified_stmt("ALTER TABLE foo VALIDATE CONSTRAINT bar") {
Statement::AlterTable { operations, .. } => {
assert_eq!(
operations,
vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }]
);
}
_ => unreachable!(),
}
}
#[test]
fn parse_create_server() {
let test_cases = vec![
(
"CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw",
CreateServerStatement {
name: ObjectName::from(vec!["myserver".into()]),
if_not_exists: false,
server_type: None,
version: None,
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: None,
},
),
(
"CREATE SERVER IF NOT EXISTS myserver TYPE 'server_type' VERSION 'server_version' FOREIGN DATA WRAPPER postgres_fdw",
CreateServerStatement {
name: ObjectName::from(vec!["myserver".into()]),
if_not_exists: true,
server_type: Some(Ident {
value: "server_type".to_string(),
quote_style: Some('\''),
span: Span::empty(),
}),
version: Some(Ident {
value: "server_version".to_string(),
quote_style: Some('\''),
span: Span::empty(),
}),
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: None,
}
),
(
"CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')",
CreateServerStatement {
name: ObjectName::from(vec!["myserver2".into()]),
if_not_exists: false,
server_type: None,
version: None,
foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
options: Some(vec![
CreateServerOption {
key: "host".into(),
value: Ident {
value: "foo".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
CreateServerOption {
key: "dbname".into(),
value: Ident {
value: "foodb".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
CreateServerOption {
key: "port".into(),
value: Ident {
value: "5432".to_string(),
quote_style: Some('\''),
span: Span::empty(),
},
},
]),
}
)
];
for (sql, expected) in test_cases {
let Statement::CreateServer(stmt) = pg_and_generic().verified_stmt(sql) else {
unreachable!()
};
assert_eq!(stmt, expected);
}
}
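// Editor's sketch (not part of the diff): parsing the simplest CREATE SERVER form through the
// public API, assuming the CreateServerStatement support added by this change. Illustrative
// only.
fn create_server_roundtrip_sketch() {
    use sqlparser::dialect::PostgreSqlDialect;
    use sqlparser::parser::Parser;

    let sql = "CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw";
    let stmts = Parser::parse_sql_statements(&PostgreSqlDialect {}, sql).unwrap();
    // As in the test above, the statement prints back in the same canonical form.
    assert_eq!(stmts[0].to_string(), sql);
}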


@ -402,3 +402,8 @@ fn parse_extract_single_quotes() {
fn parse_string_literal_backslash_escape() {
redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'");
}
#[test]
fn parse_utf8_multibyte_idents() {
redshift().verified_stmt("SELECT 🚀.city AS 🎸 FROM customers AS 🚀");
}


@ -270,8 +270,8 @@ fn test_snowflake_create_table_with_tag() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(vec![
Tag::new("A".into(), "TAG A".to_string()),
Tag::new("B".into(), "TAG B".to_string())
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
]),
with_tags
);
@ -291,8 +291,8 @@ fn test_snowflake_create_table_with_tag() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(vec![
Tag::new("A".into(), "TAG A".to_string()),
Tag::new("B".into(), "TAG B".to_string())
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
]),
with_tags
);
@ -446,19 +446,56 @@ fn test_snowflake_create_table_if_not_exists() {
}
_ => unreachable!(),
}
for (sql, parse_to) in [
(
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#,
r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
(
r#"CREATE TRANSIENT TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#,
r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#,
),
] {
snowflake().one_statement_parses_to(sql, parse_to);
}
}
#[test]
fn test_snowflake_create_table_cluster_by() {
match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b)") {
match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b, my_func(c))") {
Statement::CreateTable(CreateTable {
name, cluster_by, ..
}) => {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(WrappedCollection::Parentheses(vec![
Ident::new("a"),
Ident::new("b"),
Expr::Identifier(Ident::new("a")),
Expr::Identifier(Ident::new("b")),
Expr::Function(Function {
name: ObjectName::from(vec![Ident::new("my_func")]),
uses_odbc_syntax: false,
parameters: FunctionArguments::None,
args: FunctionArguments::List(FunctionArgumentList {
args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(
Expr::Identifier(Ident::new("c"))
))],
duplicate_treatment: None,
clauses: vec![],
}),
filter: None,
null_treatment: None,
over: None,
within_group: vec![],
}),
])),
cluster_by
)
@ -470,9 +507,22 @@ fn test_snowflake_create_table_cluster_by() {
#[test]
fn test_snowflake_create_table_comment() {
match snowflake().verified_stmt("CREATE TABLE my_table (a INT) COMMENT = 'some comment'") {
Statement::CreateTable(CreateTable { name, comment, .. }) => {
Statement::CreateTable(CreateTable {
name,
table_options,
..
}) => {
assert_eq!("my_table", name.to_string());
assert_eq!("some comment", comment.unwrap().to_string());
let plain_options = match table_options {
CreateTableOptions::Plain(options) => options,
_ => unreachable!(),
};
let comment = match plain_options.first().unwrap() {
SqlOption::Comment(CommentDef::WithEq(c))
| SqlOption::Comment(CommentDef::WithoutEq(c)) => c,
_ => unreachable!(),
};
assert_eq!("some comment", comment);
}
_ => unreachable!(),
}
@ -681,7 +731,7 @@ fn test_snowflake_create_table_with_columns_masking_policy() {
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
ColumnPolicyProperty {
with,
policy_name: "p".into(),
policy_name: ObjectName::from(vec![Ident::new("p")]),
using_columns,
}
))
@ -715,7 +765,7 @@ fn test_snowflake_create_table_with_columns_projection_policy() {
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
ColumnPolicyProperty {
with,
policy_name: "p".into(),
policy_name: ObjectName::from(vec![Ident::new("p")]),
using_columns: None,
}
))
@ -752,8 +802,14 @@ fn test_snowflake_create_table_with_columns_tags() {
option: ColumnOption::Tags(TagsColumnOption {
with,
tags: vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(
ObjectName::from(vec![Ident::new("A")]),
"TAG A".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("B")]),
"TAG B".into()
),
]
}),
}],
@ -796,7 +852,7 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
ColumnPolicyProperty {
with: true,
policy_name: "p1".into(),
policy_name: ObjectName::from(vec![Ident::new("p1")]),
using_columns: Some(vec!["a".into(), "b".into()]),
}
)),
@ -806,8 +862,14 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Tags(TagsColumnOption {
with: true,
tags: vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(
ObjectName::from(vec![Ident::new("A")]),
"TAG A".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("B")]),
"TAG B".into()
),
]
}),
}
@ -828,7 +890,7 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
ColumnPolicyProperty {
with: false,
policy_name: "p2".into(),
policy_name: ObjectName::from(vec![Ident::new("p2")]),
using_columns: None,
}
)),
@ -838,8 +900,14 @@ fn test_snowflake_create_table_with_several_column_options() {
option: ColumnOption::Tags(TagsColumnOption {
with: false,
tags: vec![
Tag::new("C".into(), "TAG C".into()),
Tag::new("D".into(), "TAG D".into()),
Tag::new(
ObjectName::from(vec![Ident::new("C")]),
"TAG C".into()
),
Tag::new(
ObjectName::from(vec![Ident::new("D")]),
"TAG D".into()
),
]
}),
}
@ -869,8 +937,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
assert_eq!("my_table", name.to_string());
assert_eq!(
Some(WrappedCollection::Parentheses(vec![
Ident::new("a"),
Ident::new("b"),
Expr::Identifier(Ident::new("a")),
Expr::Identifier(Ident::new("b")),
])),
cluster_by
);
@ -892,8 +960,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
with_aggregation_policy.map(|name| name.to_string())
);
assert_eq!(Some(vec![
Tag::new("A".into(), "TAG A".into()),
Tag::new("B".into(), "TAG B".into()),
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".into()),
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".into()),
]), with_tags);
}
@ -1578,6 +1646,13 @@ fn test_alter_table_clustering() {
snowflake_and_generic().verified_stmt("ALTER TABLE tbl RESUME RECLUSTER");
}
#[test]
fn test_alter_iceberg_table() {
snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl DROP CLUSTERING KEY");
snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl SUSPEND RECLUSTER");
snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl RESUME RECLUSTER");
}
#[test]
fn test_drop_stage() {
match snowflake_and_generic().verified_stmt("DROP STAGE s1") {
@ -2453,10 +2528,7 @@ fn test_snowflake_stage_object_names_into_location() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)
@ -2479,10 +2551,7 @@ fn test_snowflake_stage_object_names_into_table() {
.zip(allowed_object_names.iter_mut())
{
let (formatted_name, object_name) = it;
let sql = format!(
"COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
formatted_name
);
let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
match snowflake().verified_stmt(&sql) {
Statement::CopyIntoSnowflake { into, .. } => {
assert_eq!(into.0, object_name.0)
@ -2963,7 +3032,7 @@ fn parse_use() {
for object_name in &valid_object_names {
// Test single identifier without quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}", object_name)),
snowflake().verified_stmt(&format!("USE {object_name}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
object_name.to_string()
)])))
@ -2971,7 +3040,7 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
quote,
object_name.to_string(),
@ -2983,7 +3052,9 @@ fn parse_use() {
for &quote in &quote_styles {
// Test double identifier with different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Object(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
@ -3002,35 +3073,37 @@ fn parse_use() {
for &quote in &quote_styles {
// Test single and double identifier with keyword and different type of quotes
assert_eq!(
snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
quote,
"my_database".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
quote,
"my_schema".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)),
snowflake().verified_stmt(&format!(
"USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}"
)),
Statement::Use(Use::Schema(ObjectName::from(vec![
Ident::with_quote(quote, "CATALOG"),
Ident::with_quote(quote, "my_schema")
])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)),
snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")),
Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote(
quote,
"my_role".to_string(),
)])))
);
assert_eq!(
snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)),
snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")),
Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote(
quote,
"my_wh".to_string(),
@ -3067,7 +3140,7 @@ fn view_comment_option_should_be_after_column_list() {
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t",
"CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t",
] {
snowflake_and_generic()
snowflake()
.verified_stmt(sql);
}
}
@ -3076,7 +3149,7 @@ fn view_comment_option_should_be_after_column_list() {
fn parse_view_column_descriptions() {
let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1";
match snowflake_and_generic().verified_stmt(sql) {
match snowflake().verified_stmt(sql) {
Statement::CreateView { name, columns, .. } => {
assert_eq!(name.to_string(), "v");
assert_eq!(
@ -3085,7 +3158,9 @@ fn parse_view_column_descriptions() {
ViewColumnDef {
name: Ident::new("a"),
data_type: None,
options: Some(vec![ColumnOption::Comment("Comment".to_string())]),
options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
"Comment".to_string()
)])),
},
ViewColumnDef {
name: Ident::new("b"),
@ -3340,10 +3415,38 @@ fn parse_ls_and_rm() {
.unwrap();
}
#[test]
fn test_sql_keywords_as_select_item_ident() {
// Some keywords that should be parsed as an alias
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
for kw in unreserved_kws {
snowflake().verified_stmt(&format!("SELECT 1, {kw}"));
}
// Some keywords that should not be parsed as an alias
let reserved_kws = vec![
"FROM",
"GROUP",
"HAVING",
"INTERSECT",
"INTO",
"ORDER",
"SELECT",
"UNION",
"WHERE",
"WITH",
];
for kw in reserved_kws {
assert!(snowflake()
.parse_sql_statements(&format!("SELECT 1, {kw}"))
.is_err());
}
}
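// Editor's sketch (not part of the diff): the reserved-vs-unreserved split the test above
// encodes, shown as plain parse calls against the Snowflake dialect. Assumes the keyword
// handling from this change; illustrative only.
fn snowflake_keyword_select_item_sketch() {
    use sqlparser::dialect::SnowflakeDialect;
    use sqlparser::parser::Parser;

    // LIMIT is treated as an ordinary select-item identifier here, so this parses.
    assert!(Parser::parse_sql_statements(&SnowflakeDialect {}, "SELECT 1, LIMIT").is_ok());
    // FROM stays reserved, so the same shape is rejected.
    assert!(Parser::parse_sql_statements(&SnowflakeDialect {}, "SELECT 1, FROM").is_err());
}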
#[test]
fn test_sql_keywords_as_select_item_aliases() {
// Some keywords that should be parsed as an alias
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT"];
let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
for kw in unreserved_kws {
snowflake()
.one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}"));
@ -3570,7 +3673,7 @@ fn test_alter_session_followed_by_statement() {
.unwrap();
match stmts[..] {
[Statement::AlterSession { .. }, Statement::Query { .. }] => {}
_ => panic!("Unexpected statements: {:?}", stmts),
_ => panic!("Unexpected statements: {stmts:?}"),
}
}
@ -4025,3 +4128,107 @@ fn parse_connect_by_root_operator() {
"sql parser error: Expected an expression, found: FROM"
);
}
#[test]
fn test_begin_exception_end() {
for sql in [
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END",
"BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END",
"BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END",
"BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END",
] {
snowflake().verified_stmt(sql);
}
let sql = r#"
DECLARE
EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.');
EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!');
EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...');
BEGIN
BEGIN
SELECT 1;
EXCEPTION
WHEN EXCEPTION_1 THEN
SELECT 1;
WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
SELECT 2;
SELECT 3;
WHEN OTHER THEN
SELECT 4;
RAISE;
END;
END
"#;
// Outer `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction { mut statements, .. } = snowflake()
.parse_sql_statements(sql)
.unwrap()
.pop()
.unwrap()
else {
unreachable!();
};
// Inner `BEGIN` of the two nested `BEGIN` statements.
let Statement::StartTransaction {
statements,
exception,
has_end_keyword,
..
} = statements.pop().unwrap()
else {
unreachable!();
};
assert_eq!(1, statements.len());
assert!(has_end_keyword);
let exception = exception.unwrap();
assert_eq!(3, exception.len());
assert_eq!(1, exception[0].idents.len());
assert_eq!(1, exception[0].statements.len());
assert_eq!(2, exception[1].idents.len());
assert_eq!(2, exception[1].statements.len());
}
#[test]
fn test_snowflake_fetch_clause_syntax() {
let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical);
snowflake()
.verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical);
snowflake().verified_only_select_with_canonical(
"SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS",
canonical,
);
}
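// Editor's sketch (not part of the diff): the FETCH normalization the test above relies on,
// expressed as parse-then-Display, assuming the Snowflake FETCH shorthand support from this
// change. Illustrative only.
fn snowflake_fetch_canonical_sketch() {
    use sqlparser::dialect::SnowflakeDialect;
    use sqlparser::parser::Parser;

    let stmts =
        Parser::parse_sql_statements(&SnowflakeDialect {}, "SELECT c1 FROM fetch_test FETCH 2")
            .unwrap();
    // The abbreviated FETCH form is printed back in the full canonical spelling.
    assert_eq!(
        stmts[0].to_string(),
        "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY"
    );
}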
#[test]
fn test_snowflake_create_view_with_multiple_column_options() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}
#[test]
fn test_snowflake_create_view_with_composite_tag() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH TAG (foo.bar.baz.pii='email')) AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}
#[test]
fn test_snowflake_create_view_with_composite_policy_name() {
let create_view_with_tag =
r#"CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y"#;
snowflake().verified_stmt(create_view_with_tag);
}


@ -324,7 +324,7 @@ fn parse_create_table_on_conflict_col() {
Keyword::IGNORE,
Keyword::REPLACE,
] {
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword);
let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})");
match sqlite_and_generic().verified_stmt(&sql) {
Statement::CreateTable(CreateTable { columns, .. }) => {
assert_eq!(
@ -410,7 +410,7 @@ fn parse_window_function_with_filter() {
"count",
"user_defined_function",
] {
let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name);
let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t");
let select = sqlite().verified_only_select(&sql);
assert_eq!(select.to_string(), sql);
assert_eq!(
@ -444,7 +444,7 @@ fn parse_window_function_with_filter() {
fn parse_attach_database() {
let sql = "ATTACH DATABASE 'test.db' AS test";
let verified_stmt = sqlite().verified_stmt(sql);
assert_eq!(sql, format!("{}", verified_stmt));
assert_eq!(sql, format!("{verified_stmt}"));
match verified_stmt {
Statement::AttachDatabase {
schema_name,
@ -562,6 +562,36 @@ fn test_dollar_identifier_as_placeholder() {
}
}
#[test]
fn test_match_operator() {
assert_eq!(
sqlite().verified_expr("col MATCH 'pattern'"),
Expr::BinaryOp {
op: BinaryOperator::Match,
left: Box::new(Expr::Identifier(Ident::new("col"))),
right: Box::new(Expr::Value(
(Value::SingleQuotedString("pattern".to_string())).with_empty_span()
))
}
);
sqlite().verified_only_select("SELECT * FROM email WHERE email MATCH 'fts5'");
}
#[test]
fn test_regexp_operator() {
assert_eq!(
sqlite().verified_expr("col REGEXP 'pattern'"),
Expr::BinaryOp {
op: BinaryOperator::Regexp,
left: Box::new(Expr::Identifier(Ident::new("col"))),
right: Box::new(Expr::Value(
(Value::SingleQuotedString("pattern".to_string())).with_empty_span()
))
}
);
sqlite().verified_only_select(r#"SELECT count(*) FROM messages WHERE msg_text REGEXP '\d+'"#);
}
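// Editor's sketch (not part of the diff): parsing a bare expression with the SQLite dialect to
// see REGEXP come back as a binary operator, assuming the BinaryOperator::Regexp variant from
// this change. Illustrative only.
fn sqlite_regexp_operator_sketch() {
    use sqlparser::ast::{BinaryOperator, Expr};
    use sqlparser::dialect::SQLiteDialect;
    use sqlparser::parser::Parser;

    let dialect = SQLiteDialect {};
    let expr = Parser::new(&dialect)
        .try_with_sql("col REGEXP 'pattern'")
        .unwrap()
        .parse_expr()
        .unwrap();
    match expr {
        // The column and pattern sit on either side of the REGEXP operator.
        Expr::BinaryOp { op: BinaryOperator::Regexp, .. } => {}
        other => panic!("unexpected expression: {other:?}"),
    }
}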
fn sqlite() -> TestedDialects {
TestedDialects::new(vec![Box::new(SQLiteDialect {})])
}