diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml deleted file mode 100644 index c851bff3..00000000 --- a/.github/workflows/license.yml +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -name: license - -# trigger for all PRs and changes to main -on: - push: - branches: - - main - pull_request: - -jobs: - - rat: - name: Release Audit Tool (RAT) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: 3.8 - - name: Audit licenses - run: ./dev/release/run-rat.sh . diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 3abf9d38..2502abe9 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -19,9 +19,6 @@ name: Rust on: [push, pull_request] -permissions: - contents: read - jobs: codestyle: @@ -30,7 +27,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup Rust Toolchain uses: ./.github/actions/setup-builder - - run: cargo fmt --all -- --check + - run: cargo fmt -- --check lint: runs-on: ubuntu-latest @@ -40,14 +37,6 @@ jobs: uses: ./.github/actions/setup-builder - run: cargo clippy --all-targets --all-features -- -D warnings - benchmark-lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Rust Toolchain - uses: ./.github/actions/setup-builder - - run: cd sqlparser_bench && cargo clippy --all-targets --all-features -- -D warnings - compile: runs-on: ubuntu-latest steps: @@ -88,8 +77,11 @@ jobs: uses: ./.github/actions/setup-builder with: rust-version: ${{ matrix.rust }} - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Install Tarpaulin - run: cargo install cargo-tarpaulin + uses: actions-rs/install@v0.1 + with: + crate: cargo-tarpaulin + version: 0.14.2 + use-tool-cache: true - name: Test run: cargo test --all-features diff --git a/CHANGELOG.md b/CHANGELOG.md index a5511a05..7354588b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,19 +18,12 @@ --> # Changelog -All notable changes to this project will be documented in one of the linked -files. +All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - Given that the parser produces a typed AST, any changes to the AST will technically be breaking and thus will result in a `0.(N+1)` version. - - Unreleased: Check https://github.com/sqlparser-rs/sqlparser-rs/commits/main for undocumented changes. 
-- `0.56.0`: [changelog/0.56.0.md](changelog/0.56.0.md) -- `0.55.0`: [changelog/0.55.0.md](changelog/0.55.0.md) -- `0.54.0`: [changelog/0.54.0.md](changelog/0.54.0.md) -- `0.53.0`: [changelog/0.53.0.md](changelog/0.53.0.md) - `0.52.0`: [changelog/0.52.0.md](changelog/0.52.0.md) -- `0.51.0` and earlier: [changelog/0.51.0-pre.md](changelog/0.51.0-pre.md) +- `0.51.0` and earlier: [changelog/0.51.0-pre.md](changelog/0.51.0-pre.md) \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 07e44f66..18b246e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,7 +18,7 @@ [package] name = "sqlparser" description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011" -version = "0.57.0" +version = "0.52.0" authors = ["Apache DataFusion "] homepage = "https://github.com/apache/datafusion-sqlparser-rs" documentation = "https://docs.rs/sqlparser/" @@ -37,9 +37,8 @@ name = "sqlparser" path = "src/lib.rs" [features] -default = ["std", "recursive-protection"] +default = ["std"] std = [] -recursive-protection = ["std", "recursive"] # Enable JSON output in the `cli` example: json_example = ["serde_json", "serde"] visitor = ["sqlparser_derive"] @@ -47,14 +46,12 @@ visitor = ["sqlparser_derive"] [dependencies] bigdecimal = { version = "0.4.1", features = ["serde"], optional = true } log = "0.4" -recursive = { version = "0.1.1", optional = true} - -serde = { version = "1.0", default-features = false, features = ["derive", "alloc"], optional = true } +serde = { version = "1.0", features = ["derive"], optional = true } # serde_json is only used in examples/cli, but we have to put it outside # of dev-dependencies because of # https://github.com/rust-lang/cargo/issues/1596 serde_json = { version = "1.0", optional = true } -sqlparser_derive = { version = "0.3.0", path = "derive", optional = true } +sqlparser_derive = { version = "0.2.0", path = "derive", optional = true } [dev-dependencies] simple_logger = "5.0" diff --git a/README.md b/README.md index 666be17c..934d9d06 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ println!("AST: {:?}", ast); This outputs ```rust -AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name:ObjectName([Identifier(Ident { value: "myfunc", quote_style: None })]), args: [Identifier("b")], filter: None, over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName([Identifier(Ident { value: "table_1", quote_style: None })]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })] +AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name: ObjectName(["myfunc"]), args: [Identifier("b")], filter: None, over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName(["table_1"]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: 
Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })] ``` @@ -63,7 +63,7 @@ The following optional [crate features](https://doc.rust-lang.org/cargo/referen * `serde`: Adds [Serde](https://serde.rs/) support by implementing `Serialize` and `Deserialize` for all AST nodes. * `visitor`: Adds a `Visitor` capable of recursively walking the AST tree. -* `recursive-protection` (enabled by default), uses [recursive](https://docs.rs/recursive/latest/recursive/) for stack overflow protection. + ## Syntax vs Semantics @@ -89,14 +89,10 @@ keywords, the following should hold true for all SQL: ```rust // Parse SQL -let sql = "SELECT 'hello'"; let ast = Parser::parse_sql(&GenericDialect, sql).unwrap(); // The original SQL text can be generated from the AST assert_eq!(ast[0].to_string(), sql); - -// The SQL can also be pretty-printed with newlines and indentation -assert_eq!(format!("{:#}", ast[0]), "SELECT\n 'hello'"); ``` There are still some cases in this crate where different SQL with seemingly @@ -104,37 +100,15 @@ similar semantics are represented with the same AST. We welcome PRs to fix such issues and distinguish different syntaxes in the AST. -## Source Locations (Work in Progress) - -This crate allows recovering source locations from AST nodes via the [Spanned] -trait, which can be used for advanced diagnostics tooling. Note that this -feature is a work in progress and many nodes report missing or inaccurate spans. -Please see [this ticket] for information on how to contribute missing -improvements. - -[Spanned]: https://docs.rs/sqlparser/latest/sqlparser/ast/trait.Spanned.html -[this ticket]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 - -```rust -// Parse SQL -let ast = Parser::parse_sql(&GenericDialect, "SELECT A FROM B").unwrap(); - -// The source span can be retrieved with start and end locations -assert_eq!(ast[0].span(), Span { - start: Location::of(1, 1), - end: Location::of(1, 16), -}); -``` - ## SQL compliance SQL was first standardized in 1987, and revisions of the standard have been published regularly since. Most revisions have added significant new features to the language, and as a result no database claims to support the full breadth of features. This parser currently supports most of the SQL-92 syntax, plus some -syntax from newer versions that have been explicitly requested, plus various -other dialect-specific syntax. Whenever possible, the [online SQL:2016 -grammar][sql-2016-grammar] is used to guide what syntax to accept. +syntax from newer versions that have been explicitly requested, plus some MSSQL, +PostgreSQL, and other dialect-specific syntax. Whenever possible, the [online +SQL:2016 grammar][sql-2016-grammar] is used to guide what syntax to accept. Unfortunately, stating anything more specific about compliance is difficult. There is no publicly available test suite that can assess compliance @@ -160,8 +134,7 @@ $ cargo run --features json_example --example cli FILENAME.sql [--dialectname] ## Users This parser is currently being used by the [DataFusion] query engine, [LocustDB], -[Ballista], [GlueSQL], [Opteryx], [Polars], [PRQL], [Qrlew], [JumpWire], [ParadeDB], [CipherStash Proxy], -and [GreptimeDB]. +[Ballista], [GlueSQL], [Opteryx], [Polars], [PRQL], [Qrlew], [JumpWire], and [ParadeDB]. 
If your project is using sqlparser-rs feel free to make a PR to add it to this list. @@ -237,21 +210,6 @@ Our goal as maintainers is to facilitate the integration of various features from various contributors, but not to provide the implementations ourselves, as we simply don't have the resources. -### Benchmarking - -There are several micro benchmarks in the `sqlparser_bench` directory. -You can run them with: - -``` -git checkout main -cd sqlparser_bench -cargo bench -- --save-baseline main -git checkout -cargo bench -- --baseline main -``` - -By adding the `--save-baseline main` and `--baseline main` you can track the -progress of your improvements as you continue working on the feature branch. ## Licensing @@ -280,5 +238,3 @@ licensed as above, without any additional terms or conditions. [sql-standard]: https://en.wikipedia.org/wiki/ISO/IEC_9075 [`Dialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/trait.Dialect.html [`GenericDialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/struct.GenericDialect.html -[CipherStash Proxy]: https://github.com/cipherstash/proxy -[GreptimeDB]: https://github.com/GreptimeTeam/greptimedb diff --git a/changelog/0.52.0.md b/changelog/0.52.0.md index 9d5b16c7..d27fbdb1 100644 --- a/changelog/0.52.0.md +++ b/changelog/0.52.0.md @@ -19,7 +19,7 @@ under the License. # sqlparser-rs 0.52.0 Changelog -This release consists of 45 commits from 20 contributors. See credits at the end of this changelog for more information. +This release consists of 43 commits from 20 contributors. See credits at the end of this changelog for more information. **Implemented enhancements:** @@ -68,10 +68,6 @@ This release consists of 45 commits from 20 contributors. See credits at the end - Fix typo in `Dialect::supports_eq_alias_assigment` [#1478](https://github.com/apache/datafusion-sqlparser-rs/pull/1478) (alamb) - Add support for PostgreSQL `LISTEN/NOTIFY` syntax [#1485](https://github.com/apache/datafusion-sqlparser-rs/pull/1485) (wugeer) - Add support for TOP before ALL/DISTINCT [#1495](https://github.com/apache/datafusion-sqlparser-rs/pull/1495) (yoavcloud) -- add support for `FOR ORDINALITY` and `NESTED` in JSON_TABLE [#1493](https://github.com/apache/datafusion-sqlparser-rs/pull/1493) (lovasoa) -- Add Apache License to additional files [#1502](https://github.com/apache/datafusion-sqlparser-rs/pull/1502) (alamb) -- Move CHANGELOG content [#1503](https://github.com/apache/datafusion-sqlparser-rs/pull/1503) (alamb) -- improve support for T-SQL EXECUTE statements [#1490](https://github.com/apache/datafusion-sqlparser-rs/pull/1490) (lovasoa) ## Credits @@ -83,7 +79,6 @@ Thank you to everyone who contributed to this release. Here is a breakdown of co 7 hulk 3 Aleksei Piianin 3 David Caldwell - 3 Ophir LOJKINE 1 Agaev Guseyn 1 Eason 1 Fischer @@ -91,6 +86,7 @@ Thank you to everyone who contributed to this release. Here is a breakdown of co 1 Heran Lin 1 Joshua Warner 1 Maxwell Knight + 1 Ophir LOJKINE 1 Seve Martinez 1 Siyuan Huang 1 Thomas Dagenais diff --git a/changelog/0.53.0.md b/changelog/0.53.0.md deleted file mode 100644 index 5b9de07d..00000000 --- a/changelog/0.53.0.md +++ /dev/null @@ -1,95 +0,0 @@ - - -# sqlparser-rs 0.53.0 Changelog - -This release consists of 47 commits from 16 contributors. See credits at the end of this changelog for more information. 
- -**Other:** - -- hive: support for special not expression `!a` and raise error for `a!` factorial operator [#1472](https://github.com/apache/datafusion-sqlparser-rs/pull/1472) (wugeer) -- Add support for MSSQL's `OPENJSON WITH` clause [#1498](https://github.com/apache/datafusion-sqlparser-rs/pull/1498) (gaoqiangz) -- Parse true and false as identifiers in mssql [#1510](https://github.com/apache/datafusion-sqlparser-rs/pull/1510) (lovasoa) -- Fix the parsing error in MSSQL for multiple statements that include `DECLARE` statements [#1497](https://github.com/apache/datafusion-sqlparser-rs/pull/1497) (wugeer) -- Add support for Snowflake SHOW DATABASES/SCHEMAS/TABLES/VIEWS/COLUMNS statements [#1501](https://github.com/apache/datafusion-sqlparser-rs/pull/1501) (yoavcloud) -- Add support of COMMENT ON syntax for Snowflake [#1516](https://github.com/apache/datafusion-sqlparser-rs/pull/1516) (git-hulk) -- Add support for MYSQL's `CREATE TABLE SELECT` expr [#1515](https://github.com/apache/datafusion-sqlparser-rs/pull/1515) (wugeer) -- Add support for MSSQL's `XQuery` methods [#1500](https://github.com/apache/datafusion-sqlparser-rs/pull/1500) (gaoqiangz) -- Add support for Hive's `LOAD DATA` expr [#1520](https://github.com/apache/datafusion-sqlparser-rs/pull/1520) (wugeer) -- Fix ClickHouse document link from `Russian` to `English` [#1527](https://github.com/apache/datafusion-sqlparser-rs/pull/1527) (git-hulk) -- Support ANTI and SEMI joins without LEFT/RIGHT [#1528](https://github.com/apache/datafusion-sqlparser-rs/pull/1528) (delamarch3) -- support sqlite's OR clauses in update statements [#1530](https://github.com/apache/datafusion-sqlparser-rs/pull/1530) (lovasoa) -- support column type definitions in table aliases [#1526](https://github.com/apache/datafusion-sqlparser-rs/pull/1526) (lovasoa) -- Add support for MSSQL's `JSON_ARRAY`/`JSON_OBJECT` expr [#1507](https://github.com/apache/datafusion-sqlparser-rs/pull/1507) (gaoqiangz) -- Add support for PostgreSQL `UNLISTEN` syntax and Add support for Postgres `LOAD extension` expr [#1531](https://github.com/apache/datafusion-sqlparser-rs/pull/1531) (wugeer) -- Parse byte/bit string literals in MySQL and Postgres [#1532](https://github.com/apache/datafusion-sqlparser-rs/pull/1532) (mvzink) -- Allow example CLI to read from stdin [#1536](https://github.com/apache/datafusion-sqlparser-rs/pull/1536) (mvzink) -- recursive select calls are parsed with bad trailing_commas parameter [#1521](https://github.com/apache/datafusion-sqlparser-rs/pull/1521) (tomershaniii) -- PartiQL queries in Redshift [#1534](https://github.com/apache/datafusion-sqlparser-rs/pull/1534) (yoavcloud) -- Include license file in sqlparser_derive crate [#1543](https://github.com/apache/datafusion-sqlparser-rs/pull/1543) (ankane) -- Fallback to identifier parsing if expression parsing fails [#1513](https://github.com/apache/datafusion-sqlparser-rs/pull/1513) (yoavcloud) -- support `json_object('k':'v')` in postgres [#1546](https://github.com/apache/datafusion-sqlparser-rs/pull/1546) (lovasoa) -- Document micro benchmarks [#1555](https://github.com/apache/datafusion-sqlparser-rs/pull/1555) (alamb) -- Implement `Spanned` to retrieve source locations on AST nodes [#1435](https://github.com/apache/datafusion-sqlparser-rs/pull/1435) (Nyrox) -- Fix error in benchmark queries [#1560](https://github.com/apache/datafusion-sqlparser-rs/pull/1560) (alamb) -- Fix clippy warnings on rust 1.83 [#1570](https://github.com/apache/datafusion-sqlparser-rs/pull/1570) (iffyio) -- Support relation 
visitor to visit the `Option` field [#1556](https://github.com/apache/datafusion-sqlparser-rs/pull/1556) (goldmedal) -- Rename `TokenWithLocation` to `TokenWithSpan`, in backwards compatible way [#1562](https://github.com/apache/datafusion-sqlparser-rs/pull/1562) (alamb) -- Support MySQL size variants for BLOB and TEXT columns [#1564](https://github.com/apache/datafusion-sqlparser-rs/pull/1564) (mvzink) -- Increase version of sqlparser_derive from 0.2.2 to 0.3.0 [#1571](https://github.com/apache/datafusion-sqlparser-rs/pull/1571) (alamb) -- `json_object('k' VALUE 'v')` in postgres [#1547](https://github.com/apache/datafusion-sqlparser-rs/pull/1547) (lovasoa) -- Support snowflake double dot notation for object name [#1540](https://github.com/apache/datafusion-sqlparser-rs/pull/1540) (ayman-sigma) -- Update comments / docs for `Spanned` [#1549](https://github.com/apache/datafusion-sqlparser-rs/pull/1549) (alamb) -- Support Databricks struct literal [#1542](https://github.com/apache/datafusion-sqlparser-rs/pull/1542) (ayman-sigma) -- Encapsulate CreateFunction [#1573](https://github.com/apache/datafusion-sqlparser-rs/pull/1573) (philipcristiano) -- Support BIT column types [#1577](https://github.com/apache/datafusion-sqlparser-rs/pull/1577) (mvzink) -- Support parsing optional nulls handling for unique constraint [#1567](https://github.com/apache/datafusion-sqlparser-rs/pull/1567) (mvzink) -- Fix displaying WORK or TRANSACTION after BEGIN [#1565](https://github.com/apache/datafusion-sqlparser-rs/pull/1565) (mvzink) -- Add support of the ENUM8|ENUM16 for ClickHouse dialect [#1574](https://github.com/apache/datafusion-sqlparser-rs/pull/1574) (git-hulk) -- Parse Snowflake USE ROLE and USE SECONDARY ROLES [#1578](https://github.com/apache/datafusion-sqlparser-rs/pull/1578) (yoavcloud) -- Snowflake ALTER TABLE clustering options [#1579](https://github.com/apache/datafusion-sqlparser-rs/pull/1579) (yoavcloud) -- Support INSERT OVERWRITE INTO syntax [#1584](https://github.com/apache/datafusion-sqlparser-rs/pull/1584) (yuval-illumex) -- Parse `INSERT` with subquery when lacking column names [#1586](https://github.com/apache/datafusion-sqlparser-rs/pull/1586) (iffyio) -- Add support for ODBC functions [#1585](https://github.com/apache/datafusion-sqlparser-rs/pull/1585) (iffyio) - -## Credits - -Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. - -``` - 8 Andrew Lamb - 6 Michael Victor Zink - 5 Ophir LOJKINE - 5 Yoav Cohen - 5 wugeer - 3 Ifeanyi Ubah - 3 gaoqiangz - 3 hulk - 2 Ayman Elkfrawy - 1 Andrew Kane - 1 Jax Liu - 1 Mark-Oliver Junge - 1 Philip Cristiano - 1 Yuval Shkolar - 1 delamarch3 - 1 tomershaniii -``` - -Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. - diff --git a/changelog/0.54.0.md b/changelog/0.54.0.md deleted file mode 100644 index c0a63ae4..00000000 --- a/changelog/0.54.0.md +++ /dev/null @@ -1,118 +0,0 @@ - - -# sqlparser-rs 0.54.0 Changelog - -This release consists of 57 commits from 24 contributors. See credits at the end of this changelog for more information. 
- -**Implemented enhancements:** - -- feat: support `INSERT INTO [TABLE] FUNCTION` of Clickhouse [#1633](https://github.com/apache/datafusion-sqlparser-rs/pull/1633) (byte-sourcerer) - -**Other:** - -- Run cargo fmt on `derive` crate [#1595](https://github.com/apache/datafusion-sqlparser-rs/pull/1595) (alamb) -- Add Apache license header to spans.rs [#1594](https://github.com/apache/datafusion-sqlparser-rs/pull/1594) (alamb) -- Add support for BigQuery `ANY TYPE` data type [#1602](https://github.com/apache/datafusion-sqlparser-rs/pull/1602) (MartinSahlen) -- Add support for TABLESAMPLE [#1580](https://github.com/apache/datafusion-sqlparser-rs/pull/1580) (yoavcloud) -- Redshift: Fix parsing for quoted numbered columns [#1576](https://github.com/apache/datafusion-sqlparser-rs/pull/1576) (7phs) -- Add the alter table ON COMMIT option to Snowflake [#1606](https://github.com/apache/datafusion-sqlparser-rs/pull/1606) (yoavcloud) -- Support parsing `EXPLAIN ESTIMATE` of Clickhouse [#1605](https://github.com/apache/datafusion-sqlparser-rs/pull/1605) (byte-sourcerer) -- Fix BigQuery hyphenated ObjectName with numbers [#1598](https://github.com/apache/datafusion-sqlparser-rs/pull/1598) (ayman-sigma) -- Fix test compilation issue [#1609](https://github.com/apache/datafusion-sqlparser-rs/pull/1609) (iffyio) -- Allow foreign table constraint without columns [#1608](https://github.com/apache/datafusion-sqlparser-rs/pull/1608) (ramnivas) -- Support optional table for `ANALYZE` statement [#1599](https://github.com/apache/datafusion-sqlparser-rs/pull/1599) (yuyang-ok) -- Support DOUBLE data types with precision for Mysql [#1611](https://github.com/apache/datafusion-sqlparser-rs/pull/1611) (artorias1024) -- Add `#[recursive]` [#1522](https://github.com/apache/datafusion-sqlparser-rs/pull/1522) (blaginin) -- Support arbitrary composite access expressions [#1600](https://github.com/apache/datafusion-sqlparser-rs/pull/1600) (ayman-sigma) -- Consolidate `MapAccess`, and `Subscript` into `CompoundExpr` to handle the complex field access chain [#1551](https://github.com/apache/datafusion-sqlparser-rs/pull/1551) (goldmedal) -- Handle empty projection in Postgres SELECT statements [#1613](https://github.com/apache/datafusion-sqlparser-rs/pull/1613) (tobyhede) -- Merge composite and compound expr test cases [#1615](https://github.com/apache/datafusion-sqlparser-rs/pull/1615) (iffyio) -- Support Snowflake Update-From-Select [#1604](https://github.com/apache/datafusion-sqlparser-rs/pull/1604) (yuval-illumex) -- Improve parsing performance by reducing token cloning [#1587](https://github.com/apache/datafusion-sqlparser-rs/pull/1587) (davisp) -- Improve Parser documentation [#1617](https://github.com/apache/datafusion-sqlparser-rs/pull/1617) (alamb) -- Add support for DROP EXTENSION [#1610](https://github.com/apache/datafusion-sqlparser-rs/pull/1610) (ramnivas) -- Refactor advancing token to avoid duplication, avoid borrow checker issues [#1618](https://github.com/apache/datafusion-sqlparser-rs/pull/1618) (alamb) -- Fix the parsing result for the special double number [#1621](https://github.com/apache/datafusion-sqlparser-rs/pull/1621) (goldmedal) -- SQLite: Allow dollar signs in placeholder names [#1620](https://github.com/apache/datafusion-sqlparser-rs/pull/1620) (hansott) -- Improve error for an unexpected token after DROP [#1623](https://github.com/apache/datafusion-sqlparser-rs/pull/1623) (ramnivas) -- Fix `sqlparser_bench` benchmark compilation [#1625](https://github.com/apache/datafusion-sqlparser-rs/pull/1625) 
(alamb) -- Improve parsing speed by avoiding some clones in parse_identifier [#1624](https://github.com/apache/datafusion-sqlparser-rs/pull/1624) (alamb) -- Simplify `parse_keyword_apis` more [#1626](https://github.com/apache/datafusion-sqlparser-rs/pull/1626) (alamb) -- Test benchmarks and Improve benchmark README.md [#1627](https://github.com/apache/datafusion-sqlparser-rs/pull/1627) (alamb) -- Add support for MYSQL's `RENAME TABLE` [#1616](https://github.com/apache/datafusion-sqlparser-rs/pull/1616) (wugeer) -- Correctly tokenize nested comments [#1629](https://github.com/apache/datafusion-sqlparser-rs/pull/1629) (hansott) -- Add support for USE SECONDARY ROLE (vs. ROLES) [#1637](https://github.com/apache/datafusion-sqlparser-rs/pull/1637) (yoavcloud) -- Add support for various Snowflake grantees [#1640](https://github.com/apache/datafusion-sqlparser-rs/pull/1640) (yoavcloud) -- Add support for the SQL OVERLAPS predicate [#1638](https://github.com/apache/datafusion-sqlparser-rs/pull/1638) (yoavcloud) -- Add support for Snowflake LIST and REMOVE [#1639](https://github.com/apache/datafusion-sqlparser-rs/pull/1639) (yoavcloud) -- Add support for MySQL's INSERT INTO ... SET syntax [#1641](https://github.com/apache/datafusion-sqlparser-rs/pull/1641) (yoavcloud) -- Start new line if \r in Postgres dialect [#1647](https://github.com/apache/datafusion-sqlparser-rs/pull/1647) (hansott) -- Support pluralized time units [#1630](https://github.com/apache/datafusion-sqlparser-rs/pull/1630) (wugeer) -- Replace `ReferentialAction` enum in `DROP` statements [#1648](https://github.com/apache/datafusion-sqlparser-rs/pull/1648) (stepancheg) -- Add support for MS-SQL BEGIN/END TRY/CATCH [#1649](https://github.com/apache/datafusion-sqlparser-rs/pull/1649) (yoavcloud) -- Fix MySQL parsing of GRANT, REVOKE, and CREATE VIEW [#1538](https://github.com/apache/datafusion-sqlparser-rs/pull/1538) (mvzink) -- Add support for the Snowflake MINUS set operator [#1652](https://github.com/apache/datafusion-sqlparser-rs/pull/1652) (yoavcloud) -- ALTER TABLE DROP {COLUMN|CONSTRAINT} RESTRICT [#1651](https://github.com/apache/datafusion-sqlparser-rs/pull/1651) (stepancheg) -- Add support for ClickHouse `FORMAT` on `INSERT` [#1628](https://github.com/apache/datafusion-sqlparser-rs/pull/1628) (bombsimon) -- MsSQL SET for session params [#1646](https://github.com/apache/datafusion-sqlparser-rs/pull/1646) (yoavcloud) -- Correctly look for end delimiter dollar quoted string [#1650](https://github.com/apache/datafusion-sqlparser-rs/pull/1650) (hansott) -- Support single line comments starting with '#' for Hive [#1654](https://github.com/apache/datafusion-sqlparser-rs/pull/1654) (wugeer) -- Support trailing commas in `FROM` clause [#1645](https://github.com/apache/datafusion-sqlparser-rs/pull/1645) (barsela1) -- Allow empty options for BigQuery [#1657](https://github.com/apache/datafusion-sqlparser-rs/pull/1657) (MartinSahlen) -- Add support for parsing RAISERROR [#1656](https://github.com/apache/datafusion-sqlparser-rs/pull/1656) (AvivDavid-Satori) -- Add support for Snowflake column aliases that use SQL keywords [#1632](https://github.com/apache/datafusion-sqlparser-rs/pull/1632) (yoavcloud) -- fix parsing of `INSERT INTO ... SELECT ... RETURNING ` [#1661](https://github.com/apache/datafusion-sqlparser-rs/pull/1661) (lovasoa) -- Add support for `IS [NOT] [form] NORMALIZED` [#1655](https://github.com/apache/datafusion-sqlparser-rs/pull/1655) (alexander-beedie) -- Add support for qualified column names in JOIN ... 
USING [#1663](https://github.com/apache/datafusion-sqlparser-rs/pull/1663) (yoavcloud) -- Add support for Snowflake AT/BEFORE [#1667](https://github.com/apache/datafusion-sqlparser-rs/pull/1667) (yoavcloud) - -## Credits - -Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. - -``` - 13 Yoav Cohen - 9 Andrew Lamb - 4 Hans Ott - 3 Ramnivas Laddad - 3 wugeer - 2 Ayman Elkfrawy - 2 Ifeanyi Ubah - 2 Jax Liu - 2 Martin Abelson Sahlen - 2 Stepan Koltsov - 2 cjw - 1 Aleksei Piianin - 1 Alexander Beedie - 1 AvivDavid-Satori - 1 Dmitrii Blaginin - 1 Michael Victor Zink - 1 Ophir LOJKINE - 1 Paul J. Davis - 1 Simon Sawert - 1 Toby Hede - 1 Yuval Shkolar - 1 artorias1024 - 1 bar sela - 1 yuyang -``` - -Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. - diff --git a/changelog/0.55.0.md b/changelog/0.55.0.md deleted file mode 100644 index 046bf22b..00000000 --- a/changelog/0.55.0.md +++ /dev/null @@ -1,173 +0,0 @@ - - -# sqlparser-rs 0.55.0 Changelog - -This release consists of 55 commits from 25 contributors. See credits at the end of this changelog for more information. - -## Migrating usages of `Expr::Value` - -In v0.55 of sqlparser the `Expr::Value` enum variant contains a `ValueWithSpan` instead of a `Value`. Here is how to migrate. - -### When pattern matching - -```diff -- Expr::Value(Value::SingleQuotedString(my_string)) => { ... } -+ Expr::Value(ValueWithSpan{ value: Value::SingleQuotedString(my_string), span: _ }) => { ... } -``` - -### When creating an `Expr` - -Use the new `Expr::value` method (notice the lowercase `v`), which will create a `ValueWithSpan` containing an empty span: - -```diff -- Expr::Value(Value::SingleQuotedString(my_string)) -+ Expr::value(Value::SingleQuotedString(my_string)) -``` - -## Migrating usages of `ObjectName` - -In v0.55 of sqlparser, the `ObjectName` structure has been changed as shown below. Here is now to migrate. 
- -```diff -- pub struct ObjectName(pub Vec); -+ pub struct ObjectName(pub Vec) -``` - -### When constructing `ObjectName` - -Use the `From` impl: - -```diff -- name: ObjectName(vec![Ident::new("f")]), -+ name: ObjectName::from(vec![Ident::new("f")]), -``` - -### Accessing Spans - -Use the `span()` function - -```diff -- name.span -+ name.span() -``` - - - -**Breaking changes:** - -- Enhance object name path segments [#1539](https://github.com/apache/datafusion-sqlparser-rs/pull/1539) (ayman-sigma) -- Store spans for Value expressions [#1738](https://github.com/apache/datafusion-sqlparser-rs/pull/1738) (lovasoa) - -**Implemented enhancements:** - -- feat: adjust create and drop trigger for mysql dialect [#1734](https://github.com/apache/datafusion-sqlparser-rs/pull/1734) (invm) - -**Fixed bugs:** - -- fix: make `serde` feature no_std [#1730](https://github.com/apache/datafusion-sqlparser-rs/pull/1730) (iajoiner) - -**Other:** - -- Update rat_exclude_file.txt [#1670](https://github.com/apache/datafusion-sqlparser-rs/pull/1670) (alamb) -- Add support for Snowflake account privileges [#1666](https://github.com/apache/datafusion-sqlparser-rs/pull/1666) (yoavcloud) -- Add support for Create Iceberg Table statement for Snowflake parser [#1664](https://github.com/apache/datafusion-sqlparser-rs/pull/1664) (Vedin) -- National strings: check if dialect supports backslash escape [#1672](https://github.com/apache/datafusion-sqlparser-rs/pull/1672) (hansott) -- Only support escape literals for Postgres, Redshift and generic dialect [#1674](https://github.com/apache/datafusion-sqlparser-rs/pull/1674) (hansott) -- BigQuery: Support trailing commas in column definitions list [#1682](https://github.com/apache/datafusion-sqlparser-rs/pull/1682) (iffyio) -- Enable GROUP BY exp for Snowflake dialect [#1683](https://github.com/apache/datafusion-sqlparser-rs/pull/1683) (yoavcloud) -- Add support for parsing empty dictionary expressions [#1684](https://github.com/apache/datafusion-sqlparser-rs/pull/1684) (yoavcloud) -- Support multiple tables in `UPDATE FROM` clause [#1681](https://github.com/apache/datafusion-sqlparser-rs/pull/1681) (iffyio) -- Add support for mysql table hints [#1675](https://github.com/apache/datafusion-sqlparser-rs/pull/1675) (AvivDavid-Satori) -- BigQuery: Add support for select expr star [#1680](https://github.com/apache/datafusion-sqlparser-rs/pull/1680) (iffyio) -- Support underscore separators in numbers for Clickhouse. Fixes #1659 [#1677](https://github.com/apache/datafusion-sqlparser-rs/pull/1677) (graup) -- BigQuery: Fix column identifier reserved keywords list [#1678](https://github.com/apache/datafusion-sqlparser-rs/pull/1678) (iffyio) -- Fix bug when parsing a Snowflake stage with `;` suffix [#1688](https://github.com/apache/datafusion-sqlparser-rs/pull/1688) (yoavcloud) -- Allow plain JOIN without turning it into INNER [#1692](https://github.com/apache/datafusion-sqlparser-rs/pull/1692) (mvzink) -- Fix DDL generation in case of an empty arguments function. 
[#1690](https://github.com/apache/datafusion-sqlparser-rs/pull/1690) (remysaissy) -- Fix `CREATE FUNCTION` round trip for Hive dialect [#1693](https://github.com/apache/datafusion-sqlparser-rs/pull/1693) (iffyio) -- Make numeric literal underscore test dialect agnostic [#1685](https://github.com/apache/datafusion-sqlparser-rs/pull/1685) (iffyio) -- Extend lambda support for ClickHouse and DuckDB dialects [#1686](https://github.com/apache/datafusion-sqlparser-rs/pull/1686) (gstvg) -- Make TypedString preserve quote style [#1679](https://github.com/apache/datafusion-sqlparser-rs/pull/1679) (graup) -- Do not parse ASOF and MATCH_CONDITION as table factor aliases [#1698](https://github.com/apache/datafusion-sqlparser-rs/pull/1698) (yoavcloud) -- Add support for GRANT on some common Snowflake objects [#1699](https://github.com/apache/datafusion-sqlparser-rs/pull/1699) (yoavcloud) -- Add RETURNS TABLE() support for CREATE FUNCTION in Postgresql [#1687](https://github.com/apache/datafusion-sqlparser-rs/pull/1687) (remysaissy) -- Add parsing for GRANT ROLE and GRANT DATABASE ROLE in Snowflake dialect [#1689](https://github.com/apache/datafusion-sqlparser-rs/pull/1689) (yoavcloud) -- Add support for `CREATE/ALTER/DROP CONNECTOR` syntax [#1701](https://github.com/apache/datafusion-sqlparser-rs/pull/1701) (wugeer) -- Parse Snowflake COPY INTO [#1669](https://github.com/apache/datafusion-sqlparser-rs/pull/1669) (yoavcloud) -- Require space after -- to start single line comment in MySQL [#1705](https://github.com/apache/datafusion-sqlparser-rs/pull/1705) (hansott) -- Add suppport for Show Objects statement for the Snowflake parser [#1702](https://github.com/apache/datafusion-sqlparser-rs/pull/1702) (DanCodedThis) -- Fix incorrect parsing of JsonAccess bracket notation after cast in Snowflake [#1708](https://github.com/apache/datafusion-sqlparser-rs/pull/1708) (yoavcloud) -- Parse Postgres VARBIT datatype [#1703](https://github.com/apache/datafusion-sqlparser-rs/pull/1703) (mvzink) -- Implement FROM-first selects [#1713](https://github.com/apache/datafusion-sqlparser-rs/pull/1713) (mitsuhiko) -- Enable custom dialects to support `MATCH() AGAINST()` [#1719](https://github.com/apache/datafusion-sqlparser-rs/pull/1719) (joocer) -- Support group by cube/rollup etc in BigQuery [#1720](https://github.com/apache/datafusion-sqlparser-rs/pull/1720) (Groennbeck) -- Add support for MS Varbinary(MAX) (#1714) [#1715](https://github.com/apache/datafusion-sqlparser-rs/pull/1715) (TylerBrinks) -- Add supports for Hive's `SELECT ... GROUP BY .. 
GROUPING SETS` syntax [#1653](https://github.com/apache/datafusion-sqlparser-rs/pull/1653) (wugeer) -- Differentiate LEFT JOIN from LEFT OUTER JOIN [#1726](https://github.com/apache/datafusion-sqlparser-rs/pull/1726) (mvzink) -- Add support for Postgres `ALTER TYPE` [#1727](https://github.com/apache/datafusion-sqlparser-rs/pull/1727) (jvatic) -- Replace `Method` and `CompositeAccess` with `CompoundFieldAccess` [#1716](https://github.com/apache/datafusion-sqlparser-rs/pull/1716) (iffyio) -- Add support for `EXECUTE IMMEDIATE` [#1717](https://github.com/apache/datafusion-sqlparser-rs/pull/1717) (iffyio) -- Treat COLLATE like any other column option [#1731](https://github.com/apache/datafusion-sqlparser-rs/pull/1731) (mvzink) -- Add support for PostgreSQL/Redshift geometric operators [#1723](https://github.com/apache/datafusion-sqlparser-rs/pull/1723) (benrsatori) -- Implement SnowFlake ALTER SESSION [#1712](https://github.com/apache/datafusion-sqlparser-rs/pull/1712) (osipovartem) -- Extend Visitor trait for Value type [#1725](https://github.com/apache/datafusion-sqlparser-rs/pull/1725) (tomershaniii) -- Add support for `ORDER BY ALL` [#1724](https://github.com/apache/datafusion-sqlparser-rs/pull/1724) (PokIsemaine) -- Parse casting to array using double colon operator in Redshift [#1737](https://github.com/apache/datafusion-sqlparser-rs/pull/1737) (yoavcloud) -- Replace parallel condition/result vectors with single CaseWhen vector in Expr::Case. This fixes the iteration order when using the `Visitor` trait. Expressions are now visited in the same order as they appear in the sql source. [#1733](https://github.com/apache/datafusion-sqlparser-rs/pull/1733) (lovasoa) -- BigQuery: Add support for `BEGIN` [#1718](https://github.com/apache/datafusion-sqlparser-rs/pull/1718) (iffyio) -- Parse SIGNED INTEGER type in MySQL CAST [#1739](https://github.com/apache/datafusion-sqlparser-rs/pull/1739) (mvzink) -- Parse MySQL ALTER TABLE ALGORITHM option [#1745](https://github.com/apache/datafusion-sqlparser-rs/pull/1745) (mvzink) -- Random test cleanups use Expr::value [#1749](https://github.com/apache/datafusion-sqlparser-rs/pull/1749) (alamb) -- Parse ALTER TABLE AUTO_INCREMENT operation for MySQL [#1748](https://github.com/apache/datafusion-sqlparser-rs/pull/1748) (mvzink) - -## Credits - -Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. - -``` - 10 Yoav Cohen - 9 Ifeanyi Ubah - 7 Michael Victor Zink - 3 Hans Ott - 2 Andrew Lamb - 2 Ophir LOJKINE - 2 Paul Grau - 2 Rémy SAISSY - 2 wugeer - 1 Armin Ronacher - 1 Artem Osipov - 1 AvivDavid-Satori - 1 Ayman Elkfrawy - 1 DanCodedThis - 1 Denys Tsomenko - 1 Emil - 1 Ian Alexander Joiner - 1 Jesse Stuart - 1 Justin Joyce - 1 Michael - 1 SiLe Zhou - 1 Tyler Brinks - 1 benrsatori - 1 gstvg - 1 tomershaniii -``` - -Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. - diff --git a/changelog/0.56.0.md b/changelog/0.56.0.md deleted file mode 100644 index b3c8a67a..00000000 --- a/changelog/0.56.0.md +++ /dev/null @@ -1,102 +0,0 @@ - - -# sqlparser-rs 0.56.0 Changelog - -This release consists of 48 commits from 19 contributors. See credits at the end of this changelog for more information. 
- -**Other:** - -- Ignore escaped LIKE wildcards in MySQL [#1735](https://github.com/apache/datafusion-sqlparser-rs/pull/1735) (mvzink) -- Parse SET NAMES syntax in Postgres [#1752](https://github.com/apache/datafusion-sqlparser-rs/pull/1752) (mvzink) -- re-add support for nested comments in mssql [#1754](https://github.com/apache/datafusion-sqlparser-rs/pull/1754) (lovasoa) -- Extend support for INDEX parsing [#1707](https://github.com/apache/datafusion-sqlparser-rs/pull/1707) (LucaCappelletti94) -- Parse MySQL `ALTER TABLE DROP FOREIGN KEY` syntax [#1762](https://github.com/apache/datafusion-sqlparser-rs/pull/1762) (mvzink) -- add support for `with` clauses (CTEs) in `delete` statements [#1764](https://github.com/apache/datafusion-sqlparser-rs/pull/1764) (lovasoa) -- SET with a list of comma separated assignments [#1757](https://github.com/apache/datafusion-sqlparser-rs/pull/1757) (MohamedAbdeen21) -- Preserve MySQL-style `LIMIT , ` syntax [#1765](https://github.com/apache/datafusion-sqlparser-rs/pull/1765) (mvzink) -- Add support for `DROP MATERIALIZED VIEW` [#1743](https://github.com/apache/datafusion-sqlparser-rs/pull/1743) (iffyio) -- Add `CASE` and `IF` statement support [#1741](https://github.com/apache/datafusion-sqlparser-rs/pull/1741) (iffyio) -- BigQuery: Add support for `CREATE SCHEMA` options [#1742](https://github.com/apache/datafusion-sqlparser-rs/pull/1742) (iffyio) -- Snowflake: Support dollar quoted comments [#1755](https://github.com/apache/datafusion-sqlparser-rs/pull/1755) -- Add LOCK operation for ALTER TABLE [#1768](https://github.com/apache/datafusion-sqlparser-rs/pull/1768) (MohamedAbdeen21) -- Add support for `RAISE` statement [#1766](https://github.com/apache/datafusion-sqlparser-rs/pull/1766) (iffyio) -- Add GLOBAL context/modifier to SET statements [#1767](https://github.com/apache/datafusion-sqlparser-rs/pull/1767) (MohamedAbdeen21) -- Parse `SUBSTR` as alias for `SUBSTRING` [#1769](https://github.com/apache/datafusion-sqlparser-rs/pull/1769) (mvzink) -- SET statements: scope modifier for multiple assignments [#1772](https://github.com/apache/datafusion-sqlparser-rs/pull/1772) (MohamedAbdeen21) -- Support qualified column names in `MATCH AGAINST` clause [#1774](https://github.com/apache/datafusion-sqlparser-rs/pull/1774) (tomershaniii) -- Mysql: Add support for := operator [#1779](https://github.com/apache/datafusion-sqlparser-rs/pull/1779) (barsela1) -- Add cipherstash-proxy to list of users in README.md [#1782](https://github.com/apache/datafusion-sqlparser-rs/pull/1782) (coderdan) -- Fix typos [#1785](https://github.com/apache/datafusion-sqlparser-rs/pull/1785) (jayvdb) -- Add support for Databricks TIMESTAMP_NTZ. [#1781](https://github.com/apache/datafusion-sqlparser-rs/pull/1781) (romanb) -- Enable double-dot-notation for mssql. [#1787](https://github.com/apache/datafusion-sqlparser-rs/pull/1787) (romanb) -- Fix: Snowflake ALTER SESSION cannot be followed by other statements. [#1786](https://github.com/apache/datafusion-sqlparser-rs/pull/1786) (romanb) -- Add GreptimeDB to the "Users" in README [#1788](https://github.com/apache/datafusion-sqlparser-rs/pull/1788) (MichaelScofield) -- Extend snowflake grant options support [#1794](https://github.com/apache/datafusion-sqlparser-rs/pull/1794) (yoavcloud) -- Fix clippy lint on rust 1.86 [#1796](https://github.com/apache/datafusion-sqlparser-rs/pull/1796) (iffyio) -- Allow single quotes in EXTRACT() for Redshift. 
[#1795](https://github.com/apache/datafusion-sqlparser-rs/pull/1795) (romanb) -- MSSQL: Add support for functionality `MERGE` output clause [#1790](https://github.com/apache/datafusion-sqlparser-rs/pull/1790) (dilovancelik) -- Support additional DuckDB integer types such as HUGEINT, UHUGEINT, etc [#1797](https://github.com/apache/datafusion-sqlparser-rs/pull/1797) (alexander-beedie) -- Add support for MSSQL IF/ELSE statements. [#1791](https://github.com/apache/datafusion-sqlparser-rs/pull/1791) (romanb) -- Allow literal backslash escapes for string literals in Redshift dialect. [#1801](https://github.com/apache/datafusion-sqlparser-rs/pull/1801) (romanb) -- Add support for MySQL's STRAIGHT_JOIN join operator. [#1802](https://github.com/apache/datafusion-sqlparser-rs/pull/1802) (romanb) -- Snowflake COPY INTO target columns, select items and optional alias [#1805](https://github.com/apache/datafusion-sqlparser-rs/pull/1805) (yoavcloud) -- Fix tokenization of qualified identifiers with numeric prefix. [#1803](https://github.com/apache/datafusion-sqlparser-rs/pull/1803) (romanb) -- Add support for `INHERITS` option in `CREATE TABLE` statement [#1806](https://github.com/apache/datafusion-sqlparser-rs/pull/1806) (LucaCappelletti94) -- Add `DROP TRIGGER` support for SQL Server [#1813](https://github.com/apache/datafusion-sqlparser-rs/pull/1813) (aharpervc) -- Snowflake: support nested join without parentheses [#1799](https://github.com/apache/datafusion-sqlparser-rs/pull/1799) (barsela1) -- Add support for parenthesized subquery as `IN` predicate [#1793](https://github.com/apache/datafusion-sqlparser-rs/pull/1793) (adamchainz) -- Fix `STRAIGHT_JOIN` constraint when table alias is absent [#1812](https://github.com/apache/datafusion-sqlparser-rs/pull/1812) (killertux) -- Add support for `PRINT` statement for SQL Server [#1811](https://github.com/apache/datafusion-sqlparser-rs/pull/1811) (aharpervc) -- enable `supports_filter_during_aggregation` for Generic dialect [#1815](https://github.com/apache/datafusion-sqlparser-rs/pull/1815) (goldmedal) -- Add support for `XMLTABLE` [#1817](https://github.com/apache/datafusion-sqlparser-rs/pull/1817) (lovasoa) -- Add `CREATE FUNCTION` support for SQL Server [#1808](https://github.com/apache/datafusion-sqlparser-rs/pull/1808) (aharpervc) -- Add `OR ALTER` support for `CREATE VIEW` [#1818](https://github.com/apache/datafusion-sqlparser-rs/pull/1818) (aharpervc) -- Add `DECLARE ... CURSOR FOR` support for SQL Server [#1821](https://github.com/apache/datafusion-sqlparser-rs/pull/1821) (aharpervc) -- Handle missing login in changelog generate script [#1823](https://github.com/apache/datafusion-sqlparser-rs/pull/1823) (iffyio) -- Snowflake: Add support for `CONNECT_BY_ROOT` [#1780](https://github.com/apache/datafusion-sqlparser-rs/pull/1780) (tomershaniii) - -## Credits - -Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. - -``` - 8 Roman Borschel - 6 Ifeanyi Ubah - 5 Andrew Harper - 5 Michael Victor Zink - 4 Mohamed Abdeen - 3 Ophir LOJKINE - 2 Luca Cappelletti - 2 Yoav Cohen - 2 bar sela - 2 tomershaniii - 1 Adam Johnson - 1 Aleksei Piianin - 1 Alexander Beedie - 1 Bruno Clemente - 1 Dan Draper - 1 DilovanCelik - 1 Jax Liu - 1 John Vandenberg - 1 LFC -``` - -Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. 
- diff --git a/changelog/0.57.0.md b/changelog/0.57.0.md deleted file mode 100644 index 200bb73a..00000000 --- a/changelog/0.57.0.md +++ /dev/null @@ -1,95 +0,0 @@ - - -# sqlparser-rs 0.57.0 Changelog - -This release consists of 39 commits from 19 contributors. See credits at the end of this changelog for more information. - -**Implemented enhancements:** - -- feat: Hive: support `SORT BY` direction [#1873](https://github.com/apache/datafusion-sqlparser-rs/pull/1873) (chenkovsky) - -**Other:** - -- Support some of pipe operators [#1759](https://github.com/apache/datafusion-sqlparser-rs/pull/1759) (simonvandel) -- Added support for `DROP DOMAIN` [#1828](https://github.com/apache/datafusion-sqlparser-rs/pull/1828) (LucaCappelletti94) -- Improve support for cursors for SQL Server [#1831](https://github.com/apache/datafusion-sqlparser-rs/pull/1831) (aharpervc) -- Add all missing table options to be handled in any order [#1747](https://github.com/apache/datafusion-sqlparser-rs/pull/1747) (benrsatori) -- Add `CREATE TRIGGER` support for SQL Server [#1810](https://github.com/apache/datafusion-sqlparser-rs/pull/1810) (aharpervc) -- Added support for `CREATE DOMAIN` [#1830](https://github.com/apache/datafusion-sqlparser-rs/pull/1830) (LucaCappelletti94) -- Allow stored procedures to be defined without `BEGIN`/`END` [#1834](https://github.com/apache/datafusion-sqlparser-rs/pull/1834) (aharpervc) -- Add support for the MATCH and REGEXP binary operators [#1840](https://github.com/apache/datafusion-sqlparser-rs/pull/1840) (lovasoa) -- Fix: parsing ident starting with underscore in certain dialects [#1835](https://github.com/apache/datafusion-sqlparser-rs/pull/1835) (MohamedAbdeen21) -- implement pretty-printing with `{:#}` [#1847](https://github.com/apache/datafusion-sqlparser-rs/pull/1847) (lovasoa) -- Fix big performance issue in string serialization [#1848](https://github.com/apache/datafusion-sqlparser-rs/pull/1848) (lovasoa) -- Add support for `DENY` statements [#1836](https://github.com/apache/datafusion-sqlparser-rs/pull/1836) (aharpervc) -- Postgresql: Add `REPLICA IDENTITY` operation for `ALTER TABLE` [#1844](https://github.com/apache/datafusion-sqlparser-rs/pull/1844) (MohamedAbdeen21) -- Add support for INCLUDE/EXCLUDE NULLS for UNPIVOT [#1849](https://github.com/apache/datafusion-sqlparser-rs/pull/1849) (Vedin) -- pretty print improvements [#1851](https://github.com/apache/datafusion-sqlparser-rs/pull/1851) (lovasoa) -- fix new rust 1.87 cargo clippy warnings [#1856](https://github.com/apache/datafusion-sqlparser-rs/pull/1856) (lovasoa) -- Update criterion requirement from 0.5 to 0.6 in /sqlparser_bench [#1857](https://github.com/apache/datafusion-sqlparser-rs/pull/1857) (dependabot[bot]) -- pretty-print CREATE TABLE statements [#1854](https://github.com/apache/datafusion-sqlparser-rs/pull/1854) (lovasoa) -- pretty-print CREATE VIEW statements [#1855](https://github.com/apache/datafusion-sqlparser-rs/pull/1855) (lovasoa) -- Handle optional datatypes properly in `CREATE FUNCTION` statements [#1826](https://github.com/apache/datafusion-sqlparser-rs/pull/1826) (LucaCappelletti94) -- Mysql: Add `SRID` column option [#1852](https://github.com/apache/datafusion-sqlparser-rs/pull/1852) (MohamedAbdeen21) -- Add support for table valued functions for SQL Server [#1839](https://github.com/apache/datafusion-sqlparser-rs/pull/1839) (aharpervc) -- Keep the COLUMN keyword only if it exists when dropping the column [#1862](https://github.com/apache/datafusion-sqlparser-rs/pull/1862) (git-hulk) -- Add 
support for parameter default values in SQL Server [#1866](https://github.com/apache/datafusion-sqlparser-rs/pull/1866) (aharpervc) -- Add support for `TABLESAMPLE` pipe operator [#1860](https://github.com/apache/datafusion-sqlparser-rs/pull/1860) (hendrikmakait) -- Adds support for mysql's drop index [#1864](https://github.com/apache/datafusion-sqlparser-rs/pull/1864) (dmzmk) -- Fix: GROUPING SETS accept values without parenthesis [#1867](https://github.com/apache/datafusion-sqlparser-rs/pull/1867) (Vedin) -- Add ICEBERG keyword support to ALTER TABLE statement [#1869](https://github.com/apache/datafusion-sqlparser-rs/pull/1869) (osipovartem) -- MySQL: Support `index_name` in FK constraints [#1871](https://github.com/apache/datafusion-sqlparser-rs/pull/1871) (MohamedAbdeen21) -- Postgres: Apply `ONLY` keyword per table in TRUNCATE stmt [#1872](https://github.com/apache/datafusion-sqlparser-rs/pull/1872) (MohamedAbdeen21) -- Fix `CASE` expression spans [#1874](https://github.com/apache/datafusion-sqlparser-rs/pull/1874) (eliaperantoni) -- MySQL: `[[NOT] ENFORCED]` in CHECK constraint [#1870](https://github.com/apache/datafusion-sqlparser-rs/pull/1870) (MohamedAbdeen21) -- Add support for `CREATE SCHEMA WITH ( )` [#1877](https://github.com/apache/datafusion-sqlparser-rs/pull/1877) (utay) -- Add support for `ALTER TABLE DROP INDEX` [#1865](https://github.com/apache/datafusion-sqlparser-rs/pull/1865) (vimko) -- chore: Replace archived actions-rs/install action [#1876](https://github.com/apache/datafusion-sqlparser-rs/pull/1876) (assignUser) -- Allow `IF NOT EXISTS` after table name for Snowflake [#1881](https://github.com/apache/datafusion-sqlparser-rs/pull/1881) (bombsimon) -- Support `DISTINCT AS { STRUCT | VALUE }` for BigQuery [#1880](https://github.com/apache/datafusion-sqlparser-rs/pull/1880) (bombsimon) - -## Credits - -Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. - -``` - 7 Ophir LOJKINE - 6 Andrew Harper - 6 Mohamed Abdeen - 3 Luca Cappelletti - 2 Denys Tsomenko - 2 Simon Sawert - 1 Andrew Lamb - 1 Artem Osipov - 1 Chen Chongchen - 1 Dmitriy Mazurin - 1 Elia Perantoni - 1 Hendrik Makait - 1 Jacob Wujciak-Jens - 1 Simon Vandel Sillesen - 1 Yannick Utard - 1 benrsatori - 1 dependabot[bot] - 1 hulk - 1 vimko -``` - -Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. 
- diff --git a/derive/Cargo.toml b/derive/Cargo.toml index 7b647730..0c5852c4 100644 --- a/derive/Cargo.toml +++ b/derive/Cargo.toml @@ -17,8 +17,8 @@ [package] name = "sqlparser_derive" -description = "Procedural (proc) macros for sqlparser" -version = "0.3.0" +description = "proc macro for sqlparser" +version = "0.2.2" authors = ["sqlparser-rs authors"] homepage = "https://github.com/sqlparser-rs/sqlparser-rs" documentation = "https://docs.rs/sqlparser_derive/" @@ -28,7 +28,6 @@ license = "Apache-2.0" include = [ "src/**/*.rs", "Cargo.toml", - "LICENSE.TXT", ] edition = "2021" diff --git a/derive/LICENSE.TXT b/derive/LICENSE.TXT deleted file mode 120000 index 14259afe..00000000 --- a/derive/LICENSE.TXT +++ /dev/null @@ -1 +0,0 @@ -../LICENSE.TXT \ No newline at end of file diff --git a/derive/README.md b/derive/README.md index b5ccc69e..aa70e7c7 100644 --- a/derive/README.md +++ b/derive/README.md @@ -151,55 +151,6 @@ visitor.post_visit_expr() visitor.post_visit_expr() ``` -If the field is a `Option` and add `#[with = "visit_xxx"]` to the field, the generated code -will try to access the field only if it is `Some`: - -```rust -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ShowStatementIn { - pub clause: ShowStatementInClause, - pub parent_type: Option, - #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] - pub parent_name: Option, -} -``` - -This will generate - -```rust -impl sqlparser::ast::Visit for ShowStatementIn { - fn visit( - &self, - visitor: &mut V, - ) -> ::std::ops::ControlFlow { - sqlparser::ast::Visit::visit(&self.clause, visitor)?; - sqlparser::ast::Visit::visit(&self.parent_type, visitor)?; - if let Some(value) = &self.parent_name { - visitor.pre_visit_relation(value)?; - sqlparser::ast::Visit::visit(value, visitor)?; - visitor.post_visit_relation(value)?; - } - ::std::ops::ControlFlow::Continue(()) - } -} - -impl sqlparser::ast::VisitMut for ShowStatementIn { - fn visit( - &mut self, - visitor: &mut V, - ) -> ::std::ops::ControlFlow { - sqlparser::ast::VisitMut::visit(&mut self.clause, visitor)?; - sqlparser::ast::VisitMut::visit(&mut self.parent_type, visitor)?; - if let Some(value) = &mut self.parent_name { - visitor.pre_visit_relation(value)?; - sqlparser::ast::VisitMut::visit(value, visitor)?; - visitor.post_visit_relation(value)?; - } - ::std::ops::ControlFlow::Continue(()) - } -} -``` - ## Releasing This crate's release is not automated. Instead it is released manually as needed diff --git a/derive/src/lib.rs b/derive/src/lib.rs index 08c5c5db..5ad1607f 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -21,9 +21,8 @@ use syn::spanned::Spanned; use syn::{ parse::{Parse, ParseStream}, parse_macro_input, parse_quote, Attribute, Data, DeriveInput, Fields, GenericParam, Generics, - Ident, Index, LitStr, Meta, Token, Type, TypePath, + Ident, Index, LitStr, Meta, Token, }; -use syn::{Path, PathArguments}; /// Implementation of `[#derive(Visit)]` #[proc_macro_derive(VisitMut, attributes(visit))] @@ -78,10 +77,7 @@ fn derive_visit(input: proc_macro::TokenStream, visit_type: &VisitType) -> proc_ let expanded = quote! { // The generated impl. - // Note that it uses [`recursive::recursive`] to protect from stack overflow. - // See tests in https://github.com/apache/datafusion-sqlparser-rs/pull/1522/ for more info. 
impl #impl_generics sqlparser::ast::#visit_trait for #name #ty_generics #where_clause { - #[cfg_attr(feature = "recursive-protection", recursive::recursive)] fn visit( &#modifier self, visitor: &mut V @@ -186,21 +182,9 @@ fn visit_children( Fields::Named(fields) => { let recurse = fields.named.iter().map(|f| { let name = &f.ident; - let is_option = is_option(&f.ty); let attributes = Attributes::parse(&f.attrs); - if is_option && attributes.with.is_some() { - let (pre_visit, post_visit) = attributes.visit(quote!(value)); - quote_spanned!(f.span() => - if let Some(value) = &#modifier self.#name { - #pre_visit sqlparser::ast::#visit_trait::visit(value, visitor)?; #post_visit - } - ) - } else { - let (pre_visit, post_visit) = attributes.visit(quote!(&#modifier self.#name)); - quote_spanned!(f.span() => - #pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#name, visitor)?; #post_visit - ) - } + let (pre_visit, post_visit) = attributes.visit(quote!(&#modifier self.#name)); + quote_spanned!(f.span() => #pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#name, visitor)?; #post_visit) }); quote! { #(#recurse)* @@ -272,20 +256,3 @@ fn visit_children( Data::Union(_) => unimplemented!(), } } - -fn is_option(ty: &Type) -> bool { - if let Type::Path(TypePath { - path: Path { segments, .. }, - .. - }) = ty - { - if let Some(segment) = segments.last() { - if segment.ident == "Option" { - if let PathArguments::AngleBracketed(args) = &segment.arguments { - return args.args.len() == 1; - } - } - } - } - false -} diff --git a/dev/release/README.md b/dev/release/README.md index c3018dd6..588e083b 100644 --- a/dev/release/README.md +++ b/dev/release/README.md @@ -146,12 +146,6 @@ Move artifacts to the release location in SVN, using the `release-tarball.sh` sc ```shell ./dev/release/release-tarball.sh 0.52.0 1 ``` - -Promote the rc tag to the release tag -```shell -git tag v0.52.0 v0.52.0-rc3 -git push apache v0.52.0 -``` Congratulations! The release is now official! @@ -175,7 +169,7 @@ Verify that the Cargo.toml in the tarball contains the correct version (e.g. 
`version = "0.52.0"`) and then publish the crates by running the following commands ```shell -cargo publish +(cd sqlparser && cargo publish) ``` If necessary, also publish the `sqlparser_derive` crate: diff --git a/dev/release/generate-changelog.py b/dev/release/generate-changelog.py index 6f2b7c41..52fd2e54 100755 --- a/dev/release/generate-changelog.py +++ b/dev/release/generate-changelog.py @@ -28,8 +28,7 @@ def print_pulls(repo_name, title, pulls): print() for (pull, commit) in pulls: url = "https://github.com/{}/pull/{}".format(repo_name, pull.number) - author = f"({commit.author.login})" if commit.author else '' - print("- {} [#{}]({}) {}".format(pull.title, pull.number, url, author)) + print("- {} [#{}]({}) ({})".format(pull.title, pull.number, url, commit.author.login)) print() @@ -162,4 +161,4 @@ def cli(args=None): generate_changelog(repo, project, args.tag1, args.tag2, args.version) if __name__ == "__main__": - cli() + cli() \ No newline at end of file diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt index 280b1bce..a567eda9 100644 --- a/dev/release/rat_exclude_files.txt +++ b/dev/release/rat_exclude_files.txt @@ -1,8 +1,6 @@ +# Files to exclude from the Apache Rat (license) check +.gitignore .tool-versions -target/* -**.gitignore -rat.txt dev/release/rat_exclude_files.txt -sqlparser_bench/img/flamegraph.svg -**Cargo.lock -filtered_rat.txt +fuzz/.gitignore + diff --git a/examples/cli.rs b/examples/cli.rs index 08a40a6d..8a5d6501 100644 --- a/examples/cli.rs +++ b/examples/cli.rs @@ -17,11 +17,9 @@ #![warn(clippy::all)] -//! A small command-line app to run the parser. -//! Run with `cargo run --example cli` - +/// A small command-line app to run the parser. +/// Run with `cargo run --example cli` use std::fs; -use std::io::{stdin, Read}; use simple_logger::SimpleLogger; use sqlparser::dialect::*; @@ -40,9 +38,6 @@ $ cargo run --example cli FILENAME.sql [--dialectname] To print the parse results as JSON: $ cargo run --feature json_example --example cli FILENAME.sql [--dialectname] -To read from stdin instead of a file: -$ cargo run --example cli - [--dialectname] - "#, ); @@ -62,18 +57,9 @@ $ cargo run --example cli - [--dialectname] s => panic!("Unexpected parameter: {s}"), }; - let contents = if filename == "-" { - println!("Parsing from stdin using {dialect:?}"); - let mut buf = Vec::new(); - stdin() - .read_to_end(&mut buf) - .expect("failed to read from stdin"); - String::from_utf8(buf).expect("stdin content wasn't valid utf8") - } else { - println!("Parsing from file '{}' using {:?}", &filename, dialect); - fs::read_to_string(&filename) - .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename)) - }; + println!("Parsing from file '{}' using {:?}", &filename, dialect); + let contents = fs::read_to_string(&filename) + .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename)); let without_bom = if contents.chars().next().unwrap() as u64 != 0xfeff { contents.as_str() } else { diff --git a/sqlparser_bench/Cargo.toml b/sqlparser_bench/Cargo.toml index 01c59be7..9c33658a 100644 --- a/sqlparser_bench/Cargo.toml +++ b/sqlparser_bench/Cargo.toml @@ -17,7 +17,6 @@ [package] name = "sqlparser_bench" -description = "Benchmarks for sqlparser" version = "0.1.0" authors = ["Dandandan "] edition = "2018" @@ -26,7 +25,7 @@ edition = "2018" sqlparser = { path = "../" } [dev-dependencies] -criterion = "0.6" +criterion = "0.5" [[bench]] name = "sqlparser_bench" diff --git a/sqlparser_bench/README.md b/sqlparser_bench/README.md deleted file 
mode 100644 index 7f2c2625..00000000 --- a/sqlparser_bench/README.md +++ /dev/null @@ -1,42 +0,0 @@ - - -Benchmarks for sqlparser. See [the main README](../README.md) for more information. - -Note: this is in a separate, non workspace crate to avoid adding a dependency -on `criterion` to the main crate (which complicates testing without std). - -# Running Benchmarks - -```shell -cargo bench --bench sqlparser_bench -``` - -# Profiling - -Note you can generate a [flamegraph] using the following command: - -```shell -cargo flamegraph --bench sqlparser_bench -``` - -[flamegraph]: https://crates.io/crates/flamegraph - -Here is an example flamegraph: -![flamegraph](img/flamegraph.svg) diff --git a/sqlparser_bench/benches/sqlparser_bench.rs b/sqlparser_bench/benches/sqlparser_bench.rs index 6132ee43..27c58b45 100644 --- a/sqlparser_bench/benches/sqlparser_bench.rs +++ b/sqlparser_bench/benches/sqlparser_bench.rs @@ -23,9 +23,9 @@ fn basic_queries(c: &mut Criterion) { let mut group = c.benchmark_group("sqlparser-rs parsing benchmark"); let dialect = GenericDialect {}; - let string = "SELECT * FROM my_table WHERE 1 = 1"; + let string = "SELECT * FROM table WHERE 1 = 1"; group.bench_function("sqlparser::select", |b| { - b.iter(|| Parser::parse_sql(&dialect, string).unwrap()); + b.iter(|| Parser::parse_sql(&dialect, string)); }); let with_query = " @@ -33,52 +33,14 @@ fn basic_queries(c: &mut Criterion) { SELECT MAX(a) AS max_a, COUNT(b) AS b_num, user_id - FROM MY_TABLE + FROM TABLE GROUP BY user_id ) - SELECT * FROM my_table + SELECT * FROM table LEFT JOIN derived USING (user_id) "; group.bench_function("sqlparser::with_select", |b| { - b.iter(|| Parser::parse_sql(&dialect, with_query).unwrap()); - }); - - let large_statement = { - let expressions = (0..1000) - .map(|n| format!("FN_{n}(COL_{n})")) - .collect::<Vec<_>>() - .join(", "); - let tables = (0..1000) - .map(|n| format!("TABLE_{n}")) - .collect::<Vec<_>>() - .join(" JOIN "); - let where_condition = (0..1000) - .map(|n| format!("COL_{n} = {n}")) - .collect::<Vec<_>>() - .join(" OR "); - let order_condition = (0..1000) - .map(|n| format!("COL_{n} DESC")) - .collect::<Vec<_>>() - .join(", "); - - format!( - "SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}" - ) - }; - - group.bench_function("parse_large_statement", |b| { - b.iter(|| Parser::parse_sql(&dialect, std::hint::black_box(large_statement.as_str()))); - }); - - let large_statement = Parser::parse_sql(&dialect, large_statement.as_str()) - .unwrap() - .pop() - .unwrap(); - - group.bench_function("format_large_statement", |b| { - b.iter(|| { - let _formatted_query = large_statement.to_string(); - }); + b.iter(|| Parser::parse_sql(&dialect, with_query)); }); } diff --git a/sqlparser_bench/img/flamegraph.svg b/sqlparser_bench/img/flamegraph.svg deleted file mode 100644 index 0aaa17e0..00000000 --- a/sqlparser_bench/img/flamegraph.svg +++ /dev/null @@ -1,491 +0,0 @@
[The 491 deleted lines are the SVG markup of the example flamegraph (img/flamegraph.svg): a CPU profile of a sqlparser_bench run dominated by AST Display formatting (core::fmt::write), Parser::parse_* frames, and allocator activity. The raw SVG text is not reproduced here.]
samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (22 samples, 0.05%)libsystem_platform.dylib`_platform_memmove (18 samples, 0.04%)libsystem_platform.dylib`_platform_memset (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_pointer_size (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (29 samples, 0.06%)libsystem_malloc.dylib`nanov2_realloc (16 samples, 0.03%)libsystem_malloc.dylib`_realloc (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (59 samples, 0.12%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (94 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::write (93 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (111 samples, 0.23%)sqlparser_bench-959bc5267970ca34`core::fmt::write (102 samples, 0.21%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (26 samples, 0.05%)libsystem_malloc.dylib`_nanov2_free (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_type_modifiers (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (111 samples, 0.23%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (118 samples, 0.25%)libsystem_platform.dylib`_platform_memmove (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_compound_field_access (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (23 samples, 0.05%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (16 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (21 samples, 0.04%)libsystem_malloc.dylib`szone_malloc_should_clear (18 samples, 0.04%)libsystem_malloc.dylib`small_malloc_should_clear (17 samples, 0.04%)libsystem_malloc.dylib`small_malloc_from_free_list (15 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (17 samples, 0.04%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (14 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (11 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (15 samples, 0.03%)libsystem_malloc.dylib`_realloc (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::write (60 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::write (97 samples, 0.20%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_named_arg_operator (117 samples, 0.25%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (31 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (17 samples, 
0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (15 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_realloc (33 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (48 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::write (78 samples, 0.16%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (102 samples, 0.21%)sqlparser_bench-959bc5267970ca34`core::fmt::write (94 samples, 0.20%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (64 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (69 samples, 0.14%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_parse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (259 samples, 0.54%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_args (554 samples, 1.16%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (354 samples, 0.74%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (318 samples, 0.67%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (616 samples, 1.29%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_argument_list (706 samples, 1.48%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_json_null_clause (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_call (781 samples, 
1.64%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (824 samples, 1.73%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (1,166 samples, 2.45%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select_item (1,371 samples, 2.88%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (1,283 samples, 2.69%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,236 samples, 2.60%)sq..libsystem_malloc.dylib`_free (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<T as core::any::Any>::type_id (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (15 samples, 0.03%)libsystem_malloc.dylib`szone_malloc_should_clear (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_join_constraint (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_parenthesized_column_list (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keywords (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_one_of_keywords (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::consume_token (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::maybe_parse_table_sample (12 samples, 0.03%)libsystem_malloc.dylib`_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (27 samples, 0.06%)libsystem_malloc.dylib`nanov2_malloc_type (25 samples, 0.05%)libsystem_malloc.dylib`nanov2_allocate_outlined (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (14 
samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (19 samples, 0.04%)libsystem_malloc.dylib`nanov2_malloc_type (13 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (13 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (170 samples, 0.36%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (98 samples, 0.21%)libsystem_malloc.dylib`_free (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (14 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_table_alias (111 samples, 0.23%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_alias (95 samples, 0.20%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (37 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (12 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (22 samples, 0.05%)libsystem_malloc.dylib`nanov2_malloc_type (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_factor (711 samples, 1.49%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_tokens (208 samples, 0.44%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (91 samples, 0.19%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_and_joins (1,055 samples, 2.22%)s..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_tokens (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (2,821 samples, 5.92%)sqlparse..libsystem_malloc.dylib`_malloc_zone_malloc (15 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)libsystem_malloc.dylib`rack_get_thread_index (10 samples, 0.02%)libsystem_malloc.dylib`tiny_malloc_from_free_list (8 samples, 0.02%)libsystem_malloc.dylib`set_tiny_meta_header_in_use (18 samples, 0.04%)libsystem_malloc.dylib`szone_malloc_should_clear (129 samples, 0.27%)libsystem_malloc.dylib`tiny_malloc_should_clear (95 samples, 0.20%)libsystem_malloc.dylib`tiny_malloc_from_free_list (59 samples, 0.12%)libsystem_malloc.dylib`tiny_free_list_add_ptr (8 samples, 0.02%)libsystem_malloc.dylib`tiny_malloc_should_clear (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (83 samples, 0.17%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)libsystem_malloc.dylib`set_tiny_meta_header_in_use (8 samples, 0.02%)libsystem_malloc.dylib`szone_malloc_should_clear (50 samples, 0.11%)libsystem_malloc.dylib`tiny_malloc_should_clear (42 samples, 0.09%)libsystem_malloc.dylib`tiny_malloc_from_free_list (30 samples, 0.06%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memset (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (19 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`_realloc (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (29 samples, 
0.06%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (64 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::write (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::write (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (142 samples, 0.30%)sqlparser_bench-959bc5267970ca34`core::fmt::write (130 samples, 0.27%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (168 samples, 0.35%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (159 samples, 0.33%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (213 samples, 0.45%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (349 samples, 0.73%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (271 samples, 0.57%)libsystem_malloc.dylib`_free (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (20 samples, 0.04%)libsystem_platform.dylib`_platform_memset (11 samples, 0.02%)libsystem_malloc.dylib`nanov2_pointer_size (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (17 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (36 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`_realloc (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (59 samples, 0.12%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (63 samples, 0.13%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (91 samples, 0.19%)sqlparser_bench-959bc5267970ca34`core::fmt::write (89 samples, 0.19%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::write (99 samples, 0.21%)libsystem_malloc.dylib`_nanov2_free (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (13 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memset (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (8 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`_realloc (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_pointer_size (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (25 samples, 0.05%)libsystem_platform.dylib`_platform_memset (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_realloc (47 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (9 samples, 0.02%)libsystem_malloc.dylib`_realloc (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (80 samples, 0.17%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (120 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::write (118 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`core::fmt::write (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (261 samples, 0.55%)sqlparser_bench-959bc5267970ca34`core::fmt::write (205 samples, 0.43%)sqlparser_bench-959bc5267970ca34`core::fmt::write (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (379 samples, 0.80%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (360 samples, 0.76%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (17 samples, 
0.04%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_parse (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (673 samples, 1.41%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (1,392 samples, 2.92%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,215 samples, 2.55%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (24 samples, 0.05%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (23 samples, 0.05%)libsystem_platform.dylib`_platform_memset (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (35 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`_realloc (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (47 samples, 0.10%)libsystem_malloc.dylib`nanov2_size (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (128 samples, 0.27%)sqlparser_bench-959bc5267970ca34`core::fmt::write (123 samples, 0.26%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral::write_prefix (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`core::fmt::write (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (246 samples, 
0.52%)sqlparser_bench-959bc5267970ca34`core::fmt::write (223 samples, 0.47%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (273 samples, 0.57%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (269 samples, 0.56%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (343 samples, 0.72%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,947 samples, 4.09%)sqlp..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select (5,109 samples, 10.73%)sqlparser_bench-..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query_body (5,450 samples, 11.45%)sqlparser_bench-9..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query (8,118 samples, 17.05%)sqlparser_bench-959bc52679..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_settings (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statements (8,347 samples, 17.53%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statement (8,222 samples, 17.27%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (177 samples, 0.37%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_identifier_or_keyword (36 samples, 0.08%)libsystem_malloc.dylib`_free (161 samples, 0.34%)libsystem_malloc.dylib`_nanov2_free (39 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::generic::GenericDialect as sqlparser::dialect::Dialect>::is_delimited_identifier_start (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_start (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`__rdl_dealloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$realloc (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_realloc (7 samples, 0.01%)libsystem_malloc.dylib`szone_good_size (6 samples, 0.01%)libsystem_malloc.dylib`_szone_free (5 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_add_ptr (6 samples, 0.01%)libsystem_malloc.dylib`free_tiny (29 samples, 0.06%)libsystem_malloc.dylib`tiny_free_no_lock (20 samples, 0.04%)libsystem_malloc.dylib`small_try_realloc_in_place (14 samples, 0.03%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (7 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (5 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (43 samples, 0.09%)libsystem_malloc.dylib`small_malloc_should_clear (33 samples, 0.07%)libsystem_malloc.dylib`small_malloc_from_free_list (28 samples, 
0.06%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (7 samples, 0.01%)libsystem_malloc.dylib`szone_size (23 samples, 0.05%)libsystem_malloc.dylib`tiny_size (22 samples, 0.05%)libsystem_malloc.dylib`tiny_try_realloc_in_place (23 samples, 0.05%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (6 samples, 0.01%)libsystem_malloc.dylib`szone_realloc (186 samples, 0.39%)libsystem_platform.dylib`_platform_memset (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (252 samples, 0.53%)libsystem_platform.dylib`_platform_memmove (29 samples, 0.06%)libsystem_malloc.dylib`_realloc (330 samples, 0.69%)libsystem_malloc.dylib`szone_size (42 samples, 0.09%)libsystem_malloc.dylib`tiny_size (40 samples, 0.08%)libsystem_malloc.dylib`szone_malloc_should_clear (47 samples, 0.10%)libsystem_malloc.dylib`tiny_malloc_should_clear (34 samples, 0.07%)libsystem_malloc.dylib`tiny_malloc_from_free_list (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (436 samples, 0.92%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (405 samples, 0.85%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::is_custom_operator_part (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::next (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::peek (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::consume_and_return (25 samples, 0.05%)libsystem_malloc.dylib`_free (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (13 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (30 samples, 0.06%)libsystem_platform.dylib`_platform_memcmp (80 samples, 0.17%)libsystem_platform.dylib`_platform_memmove (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (21 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_malloc (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (18 samples, 0.04%)libsystem_malloc.dylib`nanov2_malloc_type (17 samples, 0.04%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Token::make_word (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_pointer_size (11 samples, 0.02%)libsystem_malloc.dylib`_realloc (30 samples, 0.06%)libsystem_malloc.dylib`_malloc_zone_realloc (23 samples, 0.05%)libsystem_malloc.dylib`nanov2_realloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (39 samples, 0.08%)libsystem_malloc.dylib`nanov2_size (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (884 samples, 1.86%)s..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (116 samples, 
0.24%)libsystem_malloc.dylib`_free (37 samples, 0.08%)libsystem_malloc.dylib`_malloc_zone_malloc (39 samples, 0.08%)libsystem_malloc.dylib`_nanov2_free (231 samples, 0.49%)libsystem_platform.dylib`_platform_memcmp (593 samples, 1.25%)libsystem_platform.dylib`_platform_memmove (110 samples, 0.23%)libsystem_malloc.dylib`_malloc_zone_malloc (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (183 samples, 0.38%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (83 samples, 0.17%)libsystem_malloc.dylib`nanov2_malloc_type (66 samples, 0.14%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (235 samples, 0.49%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (119 samples, 0.25%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (194 samples, 0.41%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (38 samples, 0.08%)libsystem_malloc.dylib`_malloc_zone_malloc (42 samples, 0.09%)libsystem_malloc.dylib`nanov2_malloc_type (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Token::make_word (363 samples, 0.76%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (137 samples, 0.29%)libsystem_malloc.dylib`nanov2_malloc_type (59 samples, 0.12%)libsystem_malloc.dylib`_nanov2_free (78 samples, 0.16%)libsystem_malloc.dylib`_malloc_zone_malloc (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (158 samples, 0.33%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (55 samples, 0.12%)libsystem_malloc.dylib`nanov2_malloc_type (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_identifier_or_keyword (2,644 samples, 5.55%)sqlpars..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (528 samples, 1.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_with_location (5,051 samples, 10.61%)sqlparser_bench..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_with_location_into_buf (4,835 samples, 10.15%)sqlparser_bench..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_with_sql (5,081 
samples, 10.67%)sqlparser_bench-..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_sql (13,813 samples, 29.01%)sqlparser_bench-959bc5267970ca34`sqlparser::par..sqlparser_bench-959bc5267970ca34`criterion::bencher::Bencher<M>::iter (18,926 samples, 39.75%)sqlparser_bench-959bc5267970ca34`criterion::bencher::Bencher<M>::..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_with_sql (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::vec::Vec<T> as alloc::vec::spec_from_iter::SpecFromIter<T,I>>::from_iter (18,946 samples, 39.79%)sqlparser_bench-959bc5267970ca34`<alloc::vec::Vec<T> as alloc::ve..libsystem_malloc.dylib`_free (8 samples, 0.02%)libsystem_malloc.dylib`_free (57 samples, 0.12%)libsystem_malloc.dylib`_nanov2_free (33 samples, 0.07%)libsystem_malloc.dylib`_szone_free (12 samples, 0.03%)libsystem_malloc.dylib`small_free_list_add_ptr (14 samples, 0.03%)libsystem_malloc.dylib`small_free_list_find_by_ptr (8 samples, 0.02%)libsystem_malloc.dylib`free_small (60 samples, 0.13%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::query::OrderByExpr as core::fmt::Display>::fmt (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (11 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (22 samples, 0.05%)libsystem_platform.dylib`_platform_memmove (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Ident as core::fmt::Display>::fmt (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Ident as core::fmt::Display>::fmt (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (143 samples, 0.30%)sqlparser_bench-959bc5267970ca34`core::fmt::write (113 samples, 0.24%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::write_str (8 samples, 0.02%)libdyld.dylib`tlv_get_addr (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`psm::stack_pointer (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rust_psm_stack_pointer (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`stacker::remaining_stack (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::write_fmt (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::write (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`recursive::get_minimum_stack_size (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`recursive::get_stack_allocation_size (7 samples, 
[Flamegraph text residue from a profiling SVG of the `sqlparser_bench-959bc5267970ca34` benchmark. Recoverable information: the sampled stacks are dominated by AST `Display` formatting (`core::fmt::write` recursing through the `<sqlparser::ast::...>::fmt` implementations such as `Expr`, `Function`, `Select`, `Query`), the `sqlparser::parser::Parser::parse_*` call chains (`parse_query`, `parse_select`, `parse_subexpr`, `parse_data_type`, `parse_object_name`, etc.), and allocator activity (`libsystem_malloc.dylib` malloc/free paths and `_platform_memmove`).]
0.18%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (96 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::write (89 samples, 0.19%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (11 samples, 0.02%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (15 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_pointer_size (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (21 samples, 0.04%)libsystem_malloc.dylib`_realloc (46 samples, 0.10%)libsystem_malloc.dylib`_malloc_zone_realloc (39 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (57 samples, 0.12%)libsystem_malloc.dylib`nanov2_size (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral::write_prefix (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (121 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::write (120 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (260 samples, 0.55%)sqlparser_bench-959bc5267970ca34`core::fmt::write (214 samples, 0.45%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (17 samples, 
0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (352 samples, 0.74%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (365 samples, 0.77%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (11 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (622 samples, 1.31%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (1,318 samples, 2.77%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,147 samples, 2.41%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (21 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (34 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_realloc (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (48 samples, 0.10%)libsystem_malloc.dylib`nanov2_size (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (14 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral::write_prefix (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (126 samples, 0.26%)sqlparser_bench-959bc5267970ca34`core::fmt::write (123 samples, 0.26%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as 
core::fmt::Display>::fmt (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (245 samples, 0.51%)sqlparser_bench-959bc5267970ca34`core::fmt::write (224 samples, 0.47%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (281 samples, 0.59%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (270 samples, 0.57%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (365 samples, 0.77%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,893 samples, 3.98%)sqlp..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query_body (5,198 samples, 10.92%)sqlparser_bench-..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select (4,878 samples, 10.24%)sqlparser_bench..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query (7,112 samples, 14.94%)sqlparser_bench-959bc52..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statements (7,412 samples, 15.57%)sqlparser_bench-959bc526..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statement (7,260 samples, 15.25%)sqlparser_bench-959bc52..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (136 samples, 0.29%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_identifier_or_keyword (26 samples, 0.05%)libsystem_malloc.dylib`_free (121 samples, 0.25%)libsystem_malloc.dylib`_nanov2_free (33 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::generic::GenericDialect as sqlparser::dialect::Dialect>::is_delimited_identifier_start (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_start (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`__rdl_dealloc (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (11 samples, 0.02%)libsystem_malloc.dylib`_szone_free (5 samples, 0.01%)libsystem_malloc.dylib`free_medium (5 samples, 0.01%)libsystem_kernel.dylib`madvise (5 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_add_ptr (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (33 samples, 0.07%)libsystem_malloc.dylib`tiny_free_no_lock (27 samples, 0.06%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (8 samples, 0.02%)libsystem_malloc.dylib`small_try_realloc_in_place (12 samples, 0.03%)libsystem_malloc.dylib`small_free_list_add_ptr (5 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (38 samples, 
0.08%)libsystem_malloc.dylib`small_malloc_should_clear (27 samples, 0.06%)libsystem_malloc.dylib`small_malloc_from_free_list (23 samples, 0.05%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (8 samples, 0.02%)libsystem_malloc.dylib`szone_size (9 samples, 0.02%)libsystem_malloc.dylib`tiny_size (8 samples, 0.02%)libsystem_malloc.dylib`tiny_try_realloc_in_place (24 samples, 0.05%)libsystem_malloc.dylib`szone_realloc (166 samples, 0.35%)libsystem_platform.dylib`_platform_memset (10 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (223 samples, 0.47%)libsystem_platform.dylib`_platform_memmove (36 samples, 0.08%)libsystem_malloc.dylib`nanov2_realloc (6 samples, 0.01%)libsystem_malloc.dylib`szone_realloc (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (297 samples, 0.62%)libsystem_malloc.dylib`szone_size (34 samples, 0.07%)libsystem_malloc.dylib`tiny_size (33 samples, 0.07%)libsystem_malloc.dylib`szone_malloc_should_clear (39 samples, 0.08%)libsystem_malloc.dylib`tiny_malloc_should_clear (31 samples, 0.07%)libsystem_malloc.dylib`tiny_malloc_from_free_list (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (401 samples, 0.84%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (358 samples, 0.75%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::next (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::peek (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::consume_and_return (18 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (21 samples, 0.04%)libsystem_platform.dylib`_platform_memcmp (42 samples, 0.09%)libsystem_platform.dylib`_platform_memmove (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (15 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (10 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Token::make_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (641 samples, 1.35%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (7 samples, 0.01%)libsystem_malloc.dylib`_free (37 samples, 0.08%)libsystem_malloc.dylib`_malloc_zone_malloc (32 samples, 0.07%)libsystem_malloc.dylib`_nanov2_free (214 samples, 0.45%)libsystem_platform.dylib`_platform_memcmp (527 samples, 1.11%)libsystem_platform.dylib`_platform_memmove (89 samples, 0.19%)libsystem_malloc.dylib`_malloc_zone_malloc (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (8 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (136 samples, 0.29%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (63 samples, 0.13%)libsystem_malloc.dylib`nanov2_malloc_type (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (201 samples, 0.42%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (176 samples, 0.37%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (27 samples, 0.06%)libsystem_malloc.dylib`_malloc_zone_malloc (37 samples, 0.08%)libsystem_malloc.dylib`nanov2_malloc_type (57 samples, 0.12%)libsystem_malloc.dylib`nanov2_allocate_outlined (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Token::make_word (300 samples, 0.63%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (111 samples, 0.23%)libsystem_malloc.dylib`nanov2_malloc_type (57 samples, 0.12%)libsystem_malloc.dylib`_nanov2_free (68 samples, 0.14%)libsystem_malloc.dylib`_malloc_zone_malloc (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (149 samples, 0.31%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (72 samples, 0.15%)libsystem_malloc.dylib`nanov2_malloc_type (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_identifier_or_keyword (2,345 samples, 4.92%)sqlpar..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (492 samples, 1.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_with_location (4,225 samples, 8.87%)sqlparser_ben..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_with_location_into_buf (4,059 samples, 8.52%)sqlparser_be..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_with_sql (4,258 samples, 8.94%)sqlparser_ben..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_with_location_into_buf (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_sql (12,017 samples, 
25.24%)sqlparser_bench-959bc5267970ca34`sqlpars..sqlparser_bench-959bc5267970ca34`criterion::bencher::Bencher<M>::iter (16,292 samples, 34.21%)sqlparser_bench-959bc5267970ca34`criterion::bencher::Be..sqlparser_bench-959bc5267970ca34`criterion::benchmark_group::BenchmarkGroup<M>::bench_function (35,307 samples, 74.15%)sqlparser_bench-959bc5267970ca34`criterion::benchmark_group::BenchmarkGroup<M>::bench_functionsqlparser_bench-959bc5267970ca34`criterion::analysis::common (35,307 samples, 74.15%)sqlparser_bench-959bc5267970ca34`criterion::analysis::commonsqlparser_bench-959bc5267970ca34`criterion::routine::Routine::sample (35,254 samples, 74.04%)sqlparser_bench-959bc5267970ca34`criterion::routine::Routine::samplesqlparser_bench-959bc5267970ca34`<criterion::routine::Function<M,F,T> as criterion::routine::Routine<M,T>>::warm_up (16,308 samples, 34.25%)sqlparser_bench-959bc5267970ca34`<criterion::routine::Fu..dyld`start (35,315 samples, 74.16%)dyld`startsqlparser_bench-959bc5267970ca34`main (35,314 samples, 74.16%)sqlparser_bench-959bc5267970ca34`mainsqlparser_bench-959bc5267970ca34`std::rt::lang_start_internal (35,314 samples, 74.16%)sqlparser_bench-959bc5267970ca34`std::rt::lang_start_internalsqlparser_bench-959bc5267970ca34`std::rt::lang_start::_{{closure}} (35,314 samples, 74.16%)sqlparser_bench-959bc5267970ca34`std::rt::lang_start::_{{closure}}sqlparser_bench-959bc5267970ca34`std::sys::backtrace::__rust_begin_short_backtrace (35,314 samples, 74.16%)sqlparser_bench-959bc5267970ca34`std::sys::backtrace::__rust_begin_short_backtracesqlparser_bench-959bc5267970ca34`sqlparser_bench::main (35,314 samples, 74.16%)sqlparser_bench-959bc5267970ca34`sqlparser_bench::mainsqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_sql (5 samples, 0.01%)libsystem_kernel.dylib`swtch_pri (133 samples, 0.28%)libsystem_m.dylib`exp (23 samples, 0.05%)libsystem_m.dylib`exp (43 samples, 0.09%)libsystem_m.dylib`exp (47 samples, 0.10%)libsystem_m.dylib`exp (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (8 samples, 0.02%)libsystem_m.dylib`exp (57 samples, 0.12%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (5 samples, 0.01%)libsystem_m.dylib`exp (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::resamples::Resamples<A>::next (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (108 samples, 0.23%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::bivariate::resamples::Resamples<X,Y>::next (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (224 samples, 0.47%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (283 samples, 0.59%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (12 samples, 0.03%)libsystem_m.dylib`exp (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (13 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (134 samples, 0.28%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (121 samples, 0.25%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (492 samples, 1.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (440 samples, 0.92%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)libsystem_m.dylib`exp (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (11 samples, 0.02%)libsystem_m.dylib`exp (70 samples, 0.15%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (24 samples, 
0.05%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (126 samples, 0.26%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (9 samples, 0.02%)libsystem_m.dylib`exp (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (137 samples, 0.29%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (125 samples, 0.26%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (311 samples, 0.65%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (309 samples, 0.65%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (284 samples, 0.60%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (943 samples, 1.98%)s..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (860 samples, 1.81%)s..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (7 samples, 0.01%)libsystem_m.dylib`exp (57 samples, 
0.12%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (8 samples, 0.02%)libsystem_m.dylib`exp (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (115 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (278 samples, 0.58%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (229 samples, 0.48%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (56 samples, 0.12%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (103 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (6 samples, 0.01%)libsystem_m.dylib`exp (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (104 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (96 samples, 0.20%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (276 samples, 0.58%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (270 samples, 0.57%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (228 samples, 0.48%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 
[Flamegraph text, sqlparser_bench-959bc5267970ca34: frames from criterion's post-run analysis phase, showing rayon parallel iterators (rayon::iter::plumbing::bridge_producer_consumer::helper, rayon_core::join::join_context) computing bootstrap statistics (criterion::analysis::estimates and compare::estimates, Sample::percentiles, Sample::median_abs_dev, rayon::slice::quicksort::recurse) plus libsystem_m.dylib`exp calls.]
0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (74 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (15 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (223 samples, 
0.47%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (219 samples, 0.46%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (183 samples, 0.38%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (466 samples, 0.98%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (409 samples, 0.86%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (52 samples, 0.11%)libsystem_m.dylib`exp (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)libsystem_m.dylib`exp (19 samples, 0.04%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (27 samples, 
0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (11 samples, 0.02%)libsystem_m.dylib`exp (12 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (12 samples, 0.03%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (150 samples, 0.32%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)libsystem_m.dylib`exp (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)libsystem_m.dylib`exp (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)libsystem_m.dylib`exp (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (145 samples, 0.30%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (141 samples, 0.30%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (113 samples, 
0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (70 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (435 samples, 0.91%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (431 samples, 0.91%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (370 samples, 0.78%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (88 samples, 0.18%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (81 samples, 0.17%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (1,110 samples, 2.33%)s..sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (1,106 samples, 2.32%)s..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (1,064 samples, 2.23%)s..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (156 samples, 0.33%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (155 samples, 0.33%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (155 samples, 0.33%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (153 samples, 0.32%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold 
(30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)libsystem_kernel.dylib`swtch_pri (5 samples, 0.01%)libsystem_m.dylib`exp (12 samples, 0.03%)libsystem_m.dylib`exp (11 samples, 0.02%)libsystem_m.dylib`exp (14 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)libsystem_m.dylib`exp (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (56 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (31 samples, 
0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (103 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (91 samples, 0.19%)libsystem_m.dylib`exp (6 samples, 0.01%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (194 samples, 0.41%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (178 samples, 0.37%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as 
rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)libsystem_m.dylib`exp (10 samples, 0.02%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (129 samples, 0.27%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (128 samples, 0.27%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (117 samples, 0.25%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (385 samples, 0.81%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (369 samples, 0.77%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (44 samples, 
0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (27 samples, 
[Flamegraph: CPU profile of the `sqlparser_bench-959bc5267970ca34` benchmark binary — samples dominated by rayon worker-thread frames (`rayon::iter::plumbing::bridge_producer_consumer::helper`, `rayon_core::join::join_context`, `rayon_core::job::StackJob::run_inline`, `WorkerThread::wait_until_cold`) and criterion's statistical analysis (`criterion::analysis::estimates::stats`, `criterion::stats::univariate::sample::Sample::percentiles`, `rayon::slice::quicksort::recurse`), plus `libsystem_m.dylib` `exp` calls.]
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (231 samples, 0.49%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (227 samples, 
0.48%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (211 samples, 0.44%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (90 samples, 0.19%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (86 samples, 0.18%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (922 samples, 
1.94%)s..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (896 samples, 1.88%)s..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (178 samples, 0.37%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (176 samples, 0.37%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (176 samples, 0.37%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (169 samples, 0.35%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)libsystem_m.dylib`exp (7 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (58 samples, 0.12%)libsystem_m.dylib`exp (6 samples, 0.01%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (220 samples, 0.46%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (220 samples, 0.46%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (204 samples, 0.43%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (78 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (77 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (19 samples, 0.04%)libsystem_kernel.dylib`swtch_pri (5 samples, 0.01%)libsystem_m.dylib`exp (9 samples, 0.02%)libsystem_m.dylib`exp (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (40 samples, 0.08%)libsystem_m.dylib`exp (7 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (107 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (94 samples, 0.20%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (68 samples, 
[flamegraph SVG (sqlparser_bench-959bc5267970ca34): Criterion benchmark profile. Roughly 17.6% of samples sit in rayon worker threads (rayon_core::registry::WorkerThread::wait_until_cold, rayon::iter::plumbing::bridge_producer_consumer, and rayon::slice::quicksort frames driven by Criterion's statistical analysis), while the remaining hot path is the recursive <sqlparser::ast::Expr as core::fmt::Display>::fmt / core::fmt::write call chain, including stacker::remaining_stack and recursive::get_minimum_stack_size checks during AST formatting.]
3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,589 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,589 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::value::Value as core::fmt::Display>::fmt (193 samples, 0.41%)sqlparser_bench-959bc5267970ca34`core::fmt::write (193 samples, 0.41%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`core::fmt::write (1,598 samples, 3.36%)sql..libsystem_malloc.dylib`free_tiny (6 samples, 0.01%)libsystem_malloc.dylib`tiny_free_no_lock (6 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_add_ptr (7 samples, 0.01%)libsystem_malloc.dylib`tiny_free_scan_madvise_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (20 samples, 0.04%)libsystem_malloc.dylib`free_tiny (20 samples, 0.04%)libsystem_malloc.dylib`tiny_free_no_lock (20 samples, 0.04%)libsystem_malloc.dylib`tiny_free_scan_madvise_free (5 samples, 0.01%)libsystem_kernel.dylib`madvise (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (9 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (9 samples, 0.02%)libsystem_malloc.dylib`tiny_free_list_add_ptr (8 samples, 0.02%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (7 samples, 0.01%)libsystem_malloc.dylib`free_tiny (20 samples, 
0.04%)libsystem_malloc.dylib`tiny_free_no_lock (20 samples, 0.04%)libsystem_malloc.dylib`free_tiny (11 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (10 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (33 samples, 0.07%)libsystem_malloc.dylib`free_tiny (16 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (12 samples, 0.03%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_szone_free (8 samples, 0.02%)libsystem_malloc.dylib`free_tiny (13 samples, 0.03%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (94 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 
0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 
0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 
0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (238 samples, 0.50%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (229 samples, 0.48%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (155 samples, 0.33%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (139 samples, 0.29%)libsystem_malloc.dylib`tiny_free_scan_madvise_free (11 samples, 0.02%)libsystem_kernel.dylib`madvise (11 samples, 0.02%)libsystem_malloc.dylib`free_tiny (15 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (18 samples, 0.04%)libsystem_kernel.dylib`madvise (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (8 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (8 samples, 0.02%)libsystem_malloc.dylib`tiny_madvise_free_range_no_lock (6 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (6 samples, 0.01%)libsystem_malloc.dylib`free_tiny (12 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (12 samples, 0.03%)libsystem_malloc.dylib`free_tiny (11 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (6 samples, 0.01%)libsystem_malloc.dylib`free_tiny (16 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (16 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (12 samples, 0.03%)libsystem_malloc.dylib`free_tiny (16 samples, 0.03%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_szone_free (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (36 samples, 0.08%)libsystem_malloc.dylib`free_tiny (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (9 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 
0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 
0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (165 samples, 0.35%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (147 samples, 0.31%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (147 samples, 
[flamegraph SVG text elided: sqlparser_bench-959bc5267970ca34 profile, 47,617 samples total, dominated by recursive `<sqlparser::ast::Expr as core::fmt::Display>::fmt` frames and `core::ptr::drop_in_place<Box<sqlparser::ast::Expr>>` frames]
diff --git a/src/ast/data_type.rs b/src/ast/data_type.rs index 0897f2db..bc48341c 100644 --- a/src/ast/data_type.rs +++ b/src/ast/data_type.rs @@ -25,435 +25,309 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::ast::{display_comma_separated, Expr, ObjectName, StructField, UnionField}; +use crate::ast::{display_comma_separated, ObjectName, StructField, UnionField}; use super::{value::escape_single_quote_string, ColumnDef}; -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum EnumMember { - Name(String), - /// ClickHouse allows to specify an integer value for each enum value. - /// - /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/data-types/enum) - NamedValue(String, Expr), -} - /// SQL data types #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum DataType { - /// Table type in [PostgreSQL], e.g. CREATE FUNCTION RETURNS TABLE(...). - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html - /// [MsSQL]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#c-create-a-multi-statement-table-valued-function - Table(Option>), - /// Table type with a name, e.g. CREATE FUNCTION RETURNS @result TABLE(...). - /// - /// [MsSQl]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#table - NamedTable { - /// Table name. - name: ObjectName, - /// Table columns. - columns: Vec, - }, - /// Fixed-length character type, e.g. CHARACTER(10). + /// Fixed-length character type e.g. CHARACTER(10) Character(Option), - /// Fixed-length char type, e.g. CHAR(10). + /// Fixed-length char type e.g. CHAR(10) Char(Option), - /// Character varying type, e.g. CHARACTER VARYING(10). + /// Character varying type e.g. CHARACTER VARYING(10) CharacterVarying(Option), - /// Char varying type, e.g. CHAR VARYING(10). + /// Char varying type e.g. CHAR VARYING(10) CharVarying(Option), - /// Variable-length character type, e.g. VARCHAR(10). + /// Variable-length character type e.g. VARCHAR(10) Varchar(Option), - /// Variable-length character type, e.g. NVARCHAR(10). + /// Variable-length character type e.g. NVARCHAR(10) Nvarchar(Option), - /// Uuid type. + /// Uuid type Uuid, - /// Large character object with optional length, - /// e.g. CHARACTER LARGE OBJECT, CHARACTER LARGE OBJECT(1000), [SQL Standard]. + /// Large character object with optional length e.g. CHARACTER LARGE OBJECT, CHARACTER LARGE OBJECT(1000), [standard] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type CharacterLargeObject(Option), - /// Large character object with optional length, - /// e.g.
CHAR LARGE OBJECT, CHAR LARGE OBJECT(1000), [SQL Standard]. + /// Large character object with optional length e.g. CHAR LARGE OBJECT, CHAR LARGE OBJECT(1000), [standard] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type CharLargeObject(Option), - /// Large character object with optional length, - /// e.g. CLOB, CLOB(1000), [SQL Standard]. + /// Large character object with optional length e.g. CLOB, CLOB(1000), [standard] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type /// [Oracle]: https://docs.oracle.com/javadb/10.10.1.2/ref/rrefclob.html Clob(Option), - /// Fixed-length binary type with optional length, - /// see [SQL Standard], [MS SQL Server]. + /// Fixed-length binary type with optional length e.g. [standard], [MS SQL Server] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type /// [MS SQL Server]: https://learn.microsoft.com/pt-br/sql/t-sql/data-types/binary-and-varbinary-transact-sql?view=sql-server-ver16 Binary(Option), - /// Variable-length binary with optional length type, - /// see [SQL Standard], [MS SQL Server]. + /// Variable-length binary with optional length type e.g. [standard], [MS SQL Server] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type /// [MS SQL Server]: https://learn.microsoft.com/pt-br/sql/t-sql/data-types/binary-and-varbinary-transact-sql?view=sql-server-ver16 - Varbinary(Option), - /// Large binary object with optional length, - /// see [SQL Standard], [Oracle]. + Varbinary(Option), + /// Large binary object with optional length e.g. BLOB, BLOB(1000), [standard], [Oracle] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-large-object-string-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-large-object-string-type /// [Oracle]: https://docs.oracle.com/javadb/10.8.3.0/ref/rrefblob.html Blob(Option), - /// [MySQL] blob with up to 2**8 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - TinyBlob, - /// [MySQL] blob with up to 2**24 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - MediumBlob, - /// [MySQL] blob with up to 2**32 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - LongBlob, /// Variable-length binary data with optional length. /// - /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#bytes_type + /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#bytes_type Bytes(Option), - /// Numeric type with optional precision and scale, e.g. NUMERIC(10,2), [SQL Standard][1]. + /// Numeric type with optional precision and scale e.g. 
NUMERIC(10,2), [standard][1] /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Numeric(ExactNumberInfo), - /// Decimal type with optional precision and scale, e.g. DECIMAL(10,2), [SQL Standard][1]. + /// Decimal type with optional precision and scale e.g. DECIMAL(10,2), [standard][1] /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Decimal(ExactNumberInfo), - /// [BigNumeric] type used in BigQuery. + /// [BigNumeric] type used in BigQuery /// /// [BigNumeric]: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#bignumeric_literals BigNumeric(ExactNumberInfo), - /// This is alias for `BigNumeric` type used in BigQuery. + /// This is alias for `BigNumeric` type used in BigQuery /// /// [BigDecimal]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types BigDecimal(ExactNumberInfo), - /// Dec type with optional precision and scale, e.g. DEC(10,2), [SQL Standard][1]. + /// Dec type with optional precision and scale e.g. DEC(10,2), [standard][1] /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Dec(ExactNumberInfo), - /// Floating point with optional precision, e.g. FLOAT(8). + /// Floating point with optional precision e.g. FLOAT(8) Float(Option), - /// Tiny integer with optional display width, e.g. TINYINT or TINYINT(3). + /// Tiny integer with optional display width e.g. TINYINT or TINYINT(3) TinyInt(Option), - /// Unsigned tiny integer with optional display width, - /// e.g. TINYINT UNSIGNED or TINYINT(3) UNSIGNED. - TinyIntUnsigned(Option), - /// Unsigned tiny integer, e.g. UTINYINT - UTinyInt, - /// Int2 is an alias for SmallInt in [PostgreSQL]. - /// Note: Int2 means 2 bytes in PostgreSQL (not 2 bits). - /// Int2 with optional display width, e.g. INT2 or INT2(5). + /// Unsigned tiny integer with optional display width e.g. TINYINT UNSIGNED or TINYINT(3) UNSIGNED + UnsignedTinyInt(Option), + /// Int2 as alias for SmallInt in [postgresql] + /// Note: Int2 mean 2 bytes in postgres (not 2 bits) + /// Int2 with optional display width e.g. INT2 or INT2(5) /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html Int2(Option), - /// Unsigned Int2 with optional display width, e.g. INT2 UNSIGNED or INT2(5) UNSIGNED. - Int2Unsigned(Option), - /// Small integer with optional display width, e.g. SMALLINT or SMALLINT(5). + /// Unsigned Int2 with optional display width e.g. INT2 Unsigned or INT2(5) Unsigned + UnsignedInt2(Option), + /// Small integer with optional display width e.g. SMALLINT or SMALLINT(5) SmallInt(Option), - /// Unsigned small integer with optional display width, - /// e.g. SMALLINT UNSIGNED or SMALLINT(5) UNSIGNED. - SmallIntUnsigned(Option), - /// Unsigned small integer, e.g. USMALLINT. - USmallInt, - /// MySQL medium integer ([1]) with optional display width, - /// e.g. MEDIUMINT or MEDIUMINT(5). + /// Unsigned small integer with optional display width e.g. SMALLINT UNSIGNED or SMALLINT(5) UNSIGNED + UnsignedSmallInt(Option), + /// MySQL medium integer ([1]) with optional display width e.g. MEDIUMINT or MEDIUMINT(5) /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/integer-types.html MediumInt(Option), - /// Unsigned medium integer ([1]) with optional display width, - /// e.g. MEDIUMINT UNSIGNED or MEDIUMINT(5) UNSIGNED. 
+ /// Unsigned medium integer ([1]) with optional display width e.g. MEDIUMINT UNSIGNED or MEDIUMINT(5) UNSIGNED /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/integer-types.html - MediumIntUnsigned(Option), - /// Int with optional display width, e.g. INT or INT(11). + UnsignedMediumInt(Option), + /// Int with optional display width e.g. INT or INT(11) Int(Option), - /// Int4 is an alias for Integer in [PostgreSQL]. - /// Note: Int4 means 4 bytes in PostgreSQL (not 4 bits). - /// Int4 with optional display width, e.g. Int4 or Int4(11). + /// Int4 as alias for Integer in [postgresql] + /// Note: Int4 mean 4 bytes in postgres (not 4 bits) + /// Int4 with optional display width e.g. Int4 or Int4(11) /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html Int4(Option), - /// Int8 is an alias for BigInt in [PostgreSQL] and Integer type in [ClickHouse]. - /// Int8 with optional display width, e.g. INT8 or INT8(11). - /// Note: Int8 means 8 bytes in [PostgreSQL], but 8 bits in [ClickHouse]. + /// Int8 as alias for Bigint in [postgresql] and integer type in [clickhouse] + /// Note: Int8 mean 8 bytes in [postgresql] (not 8 bits) + /// Int8 with optional display width e.g. INT8 or INT8(11) + /// Note: Int8 mean 8 bits in [clickhouse] /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int8(Option), - /// Integer type in [ClickHouse]. - /// Note: Int16 means 16 bits in [ClickHouse]. + /// Integer type in [clickhouse] + /// Note: Int16 mean 16 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int16, - /// Integer type in [ClickHouse]. - /// Note: Int32 means 32 bits in [ClickHouse]. + /// Integer type in [clickhouse] + /// Note: Int16 mean 32 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int32, - /// Integer type in [BigQuery], [ClickHouse]. + /// Integer type in [bigquery], [clickhouse] /// - /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int64, - /// Integer type in [ClickHouse]. - /// Note: Int128 means 128 bits in [ClickHouse]. + /// Integer type in [clickhouse] + /// Note: Int128 mean 128 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int128, - /// Integer type in [ClickHouse]. - /// Note: Int256 means 256 bits in [ClickHouse]. 
+ /// Integer type in [clickhouse] + /// Note: Int256 mean 256 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int256, - /// Integer with optional display width, e.g. INTEGER or INTEGER(11). + /// Integer with optional display width e.g. INTEGER or INTEGER(11) Integer(Option), - /// Unsigned int with optional display width, e.g. INT UNSIGNED or INT(11) UNSIGNED. - IntUnsigned(Option), - /// Unsigned int4 with optional display width, e.g. INT4 UNSIGNED or INT4(11) UNSIGNED. - Int4Unsigned(Option), - /// Unsigned integer with optional display width, e.g. INTEGER UNSIGNED or INTEGER(11) UNSIGNED. - IntegerUnsigned(Option), - /// 128-bit integer type, e.g. HUGEINT. - HugeInt, - /// Unsigned 128-bit integer type, e.g. UHUGEINT. - UHugeInt, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt8 means 8 bits in [ClickHouse]. + /// Unsigned int with optional display width e.g. INT UNSIGNED or INT(11) UNSIGNED + UnsignedInt(Option), + /// Unsigned int4 with optional display width e.g. INT4 UNSIGNED or INT4(11) UNSIGNED + UnsignedInt4(Option), + /// Unsigned integer with optional display width e.g. INTEGER UNSIGNED or INTEGER(11) UNSIGNED + UnsignedInteger(Option), + /// Unsigned integer type in [clickhouse] + /// Note: UInt8 mean 8 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt8, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt16 means 16 bits in [ClickHouse]. + /// Unsigned integer type in [clickhouse] + /// Note: UInt16 mean 16 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt16, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt32 means 32 bits in [ClickHouse]. + /// Unsigned integer type in [clickhouse] + /// Note: UInt32 mean 32 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt32, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt64 means 64 bits in [ClickHouse]. + /// Unsigned integer type in [clickhouse] + /// Note: UInt64 mean 64 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt64, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt128 means 128 bits in [ClickHouse]. + /// Unsigned integer type in [clickhouse] + /// Note: UInt128 mean 128 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt128, - /// Unsigned integer type in [ClickHouse]. - /// Note: UInt256 means 256 bits in [ClickHouse]. + /// Unsigned integer type in [clickhouse] + /// Note: UInt256 mean 256 bits in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt256, - /// Big integer with optional display width, e.g. BIGINT or BIGINT(20). 
+ /// Big integer with optional display width e.g. BIGINT or BIGINT(20) BigInt(Option), - /// Unsigned big integer with optional display width, e.g. BIGINT UNSIGNED or BIGINT(20) UNSIGNED. - BigIntUnsigned(Option), - /// Unsigned big integer, e.g. UBIGINT. - UBigInt, - /// Unsigned Int8 with optional display width, e.g. INT8 UNSIGNED or INT8(11) UNSIGNED. - Int8Unsigned(Option), - /// Signed integer as used in [MySQL CAST] target types, without optional `INTEGER` suffix, - /// e.g. `SIGNED` + /// Unsigned big integer with optional display width e.g. BIGINT UNSIGNED or BIGINT(20) UNSIGNED + UnsignedBigInt(Option), + /// Unsigned Int8 with optional display width e.g. INT8 UNSIGNED or INT8(11) UNSIGNED + UnsignedInt8(Option), + /// Float4 as alias for Real in [postgresql] /// - /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html - Signed, - /// Signed integer as used in [MySQL CAST] target types, with optional `INTEGER` suffix, - /// e.g. `SIGNED INTEGER` - /// - /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html - SignedInteger, - /// Signed integer as used in [MySQL CAST] target types, without optional `INTEGER` suffix, - /// e.g. `SIGNED` - /// - /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html - Unsigned, - /// Unsigned integer as used in [MySQL CAST] target types, with optional `INTEGER` suffix, - /// e.g. `UNSIGNED INTEGER`. - /// - /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html - UnsignedInteger, - /// Float4 is an alias for Real in [PostgreSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html Float4, - /// Floating point in [ClickHouse]. + /// Floating point in [clickhouse] /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float Float32, - /// Floating point in [BigQuery]. + /// Floating point in [bigquery] /// - /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float + /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float Float64, - /// Floating point, e.g. REAL. + /// Floating point e.g. REAL Real, - /// Float8 is an alias for Double in [PostgreSQL]. + /// Float8 as alias for Double in [postgresql] /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html Float8, /// Double - Double(ExactNumberInfo), - /// Double Precision, see [SQL Standard], [PostgreSQL]. + Double, + /// Double PRECISION e.g. [standard], [postgresql] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#approximate-numeric-type - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-numeric.html + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#approximate-numeric-type + /// [postgresql]: https://www.postgresql.org/docs/current/datatype-numeric.html DoublePrecision, - /// Bool is an alias for Boolean, see [PostgreSQL]. 
+ /// Bool as alias for Boolean in [postgresql] /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html Bool, - /// Boolean type. + /// Boolean Boolean, - /// Date type. + /// Date Date, - /// Date32 with the same range as Datetime64. + /// Date32 with the same range as Datetime64 /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/date32 Date32, - /// Time with optional time precision and time zone information, see [SQL Standard][1]. + /// Time with optional time precision and time zone information e.g. [standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type Time(Option, TimezoneInfo), - /// Datetime with optional time precision, see [MySQL][1]. + /// Datetime with optional time precision e.g. [MySQL][1]. /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/datetime.html Datetime(Option), - /// Datetime with time precision and optional timezone, see [ClickHouse][1]. + /// Datetime with time precision and optional timezone e.g. [ClickHouse][1]. /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 Datetime64(u64, Option), - /// Timestamp with optional time precision and time zone information, see [SQL Standard][1]. + /// Timestamp with optional time precision and time zone information e.g. [standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type Timestamp(Option, TimezoneInfo), - /// Databricks timestamp without time zone. See [1]. - /// - /// [1]: https://docs.databricks.com/aws/en/sql/language-manual/data-types/timestamp-ntz-type - TimestampNtz, - /// Interval type. + /// Interval Interval, - /// JSON type. + /// JSON type JSON, - /// Binary JSON type. + /// Binary JSON type JSONB, - /// Regclass used in [PostgreSQL] serial. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// Regclass used in postgresql serial Regclass, - /// Text type. + /// Text Text, - /// [MySQL] text with up to 2**8 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - TinyText, - /// [MySQL] text with up to 2**24 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - MediumText, - /// [MySQL] text with up to 2**32 bytes. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html - LongText, /// String with optional length. String(Option), /// A fixed-length string e.g [ClickHouse][1]. /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/fixedstring FixedString(u64), - /// Bytea type, see [PostgreSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html + /// Bytea Bytea, - /// Bit string, see [PostgreSQL], [MySQL], or [MSSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/bit-type.html - /// [MSSQL]: https://learn.microsoft.com/en-us/sql/t-sql/data-types/bit-transact-sql?view=sql-server-ver16 - Bit(Option), - /// `BIT VARYING(n)`: Variable-length bit string, see [PostgreSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html - BitVarying(Option), - /// `VARBIT(n)`: Variable-length bit string. [PostgreSQL] alias for `BIT VARYING`. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html - VarBit(Option), - /// Custom types. 
+ /// Custom type such as enums Custom(ObjectName, Vec), - /// Arrays. + /// Arrays Array(ArrayElemTypeDef), - /// Map, see [ClickHouse]. + /// Map /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/map + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/map Map(Box, Box), - /// Tuple, see [ClickHouse]. + /// Tuple /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple Tuple(Vec), - /// Nested type, see [ClickHouse]. + /// Nested /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nested-data-structures/nested + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nested-data-structures/nested Nested(Vec), - /// Enum type. - Enum(Vec, Option), - /// Set type. + /// Enums + Enum(Vec), + /// Set Set(Vec), - /// Struct type, see [Hive], [BigQuery]. + /// Struct /// - /// [Hive]: https://docs.cloudera.com/cdw-runtime/cloud/impala-sql-reference/topics/impala-struct.html - /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type + /// [hive]: https://docs.cloudera.com/cdw-runtime/cloud/impala-sql-reference/topics/impala-struct.html + /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type Struct(Vec, StructBracketKind), - /// Union type, see [DuckDB]. + /// Union /// - /// [DuckDB]: https://duckdb.org/docs/sql/data_types/union.html + /// [duckdb]: https://duckdb.org/docs/sql/data_types/union.html Union(Vec), /// Nullable - special marker NULL represents in ClickHouse as a data type. /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nullable + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nullable Nullable(Box), /// LowCardinality - changes the internal representation of other data types to be dictionary-encoded. /// - /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality + /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality LowCardinality(Box), /// No type specified - only used with /// [`SQLiteDialect`](crate::dialect::SQLiteDialect), from statements such /// as `CREATE TABLE t1 (a)`. Unspecified, - /// Trigger data type, returned by functions associated with triggers, see [PostgreSQL]. + /// Trigger data type, returned by functions associated with triggers /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/plpgsql-trigger.html + /// [postgresql]: https://www.postgresql.org/docs/current/plpgsql-trigger.html Trigger, - /// Any data type, used in BigQuery UDF definitions for templated parameters, see [BigQuery]. - /// - /// [BigQuery]: https://cloud.google.com/bigquery/docs/user-defined-functions#templated-sql-udf-parameters - AnyType, - /// Geometric type, see [PostgreSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html - GeometricType(GeometricTypeKind), - /// PostgreSQL text search vectors, see [PostgreSQL]. - /// - /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html - TsVector, - /// PostgreSQL text search query, see [PostgreSQL]. 
- /// - /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html - TsQuery, } impl fmt::Display for DataType { @@ -464,6 +338,7 @@ impl fmt::Display for DataType { DataType::CharacterVarying(size) => { format_character_string_type(f, "CHARACTER VARYING", size) } + DataType::CharVarying(size) => format_character_string_type(f, "CHAR VARYING", size), DataType::Varchar(size) => format_character_string_type(f, "VARCHAR", size), DataType::Nvarchar(size) => format_character_string_type(f, "NVARCHAR", size), @@ -476,11 +351,10 @@ impl fmt::Display for DataType { } DataType::Clob(size) => format_type_with_optional_length(f, "CLOB", size, false), DataType::Binary(size) => format_type_with_optional_length(f, "BINARY", size, false), - DataType::Varbinary(size) => format_varbinary_type(f, "VARBINARY", size), + DataType::Varbinary(size) => { + format_type_with_optional_length(f, "VARBINARY", size, false) + } DataType::Blob(size) => format_type_with_optional_length(f, "BLOB", size, false), - DataType::TinyBlob => write!(f, "TINYBLOB"), - DataType::MediumBlob => write!(f, "MEDIUMBLOB"), - DataType::LongBlob => write!(f, "LONGBLOB"), DataType::Bytes(size) => format_type_with_optional_length(f, "BYTES", size, false), DataType::Numeric(info) => { write!(f, "NUMERIC{info}") @@ -497,29 +371,29 @@ impl fmt::Display for DataType { DataType::TinyInt(zerofill) => { format_type_with_optional_length(f, "TINYINT", zerofill, false) } - DataType::TinyIntUnsigned(zerofill) => { + DataType::UnsignedTinyInt(zerofill) => { format_type_with_optional_length(f, "TINYINT", zerofill, true) } DataType::Int2(zerofill) => { format_type_with_optional_length(f, "INT2", zerofill, false) } - DataType::Int2Unsigned(zerofill) => { + DataType::UnsignedInt2(zerofill) => { format_type_with_optional_length(f, "INT2", zerofill, true) } DataType::SmallInt(zerofill) => { format_type_with_optional_length(f, "SMALLINT", zerofill, false) } - DataType::SmallIntUnsigned(zerofill) => { + DataType::UnsignedSmallInt(zerofill) => { format_type_with_optional_length(f, "SMALLINT", zerofill, true) } DataType::MediumInt(zerofill) => { format_type_with_optional_length(f, "MEDIUMINT", zerofill, false) } - DataType::MediumIntUnsigned(zerofill) => { + DataType::UnsignedMediumInt(zerofill) => { format_type_with_optional_length(f, "MEDIUMINT", zerofill, true) } DataType::Int(zerofill) => format_type_with_optional_length(f, "INT", zerofill, false), - DataType::IntUnsigned(zerofill) => { + DataType::UnsignedInt(zerofill) => { format_type_with_optional_length(f, "INT", zerofill, true) } DataType::Int4(zerofill) => { @@ -543,39 +417,24 @@ impl fmt::Display for DataType { DataType::Int256 => { write!(f, "Int256") } - DataType::HugeInt => { - write!(f, "HUGEINT") - } - DataType::Int4Unsigned(zerofill) => { + DataType::UnsignedInt4(zerofill) => { format_type_with_optional_length(f, "INT4", zerofill, true) } DataType::Integer(zerofill) => { format_type_with_optional_length(f, "INTEGER", zerofill, false) } - DataType::IntegerUnsigned(zerofill) => { + DataType::UnsignedInteger(zerofill) => { format_type_with_optional_length(f, "INTEGER", zerofill, true) } DataType::BigInt(zerofill) => { format_type_with_optional_length(f, "BIGINT", zerofill, false) } - DataType::BigIntUnsigned(zerofill) => { + DataType::UnsignedBigInt(zerofill) => { format_type_with_optional_length(f, "BIGINT", zerofill, true) } - DataType::Int8Unsigned(zerofill) => { + DataType::UnsignedInt8(zerofill) => { format_type_with_optional_length(f, "INT8", zerofill, true) } - 
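For reference, the integer arms above all delegate to `format_type_with_optional_length`; its body is not part of this hunk, so the following is a hypothetical, self-contained sketch of that formatting pattern (optional display width in parentheses, optional ` UNSIGNED` suffix), not the crate's code:

```rust
use std::fmt;

// Hypothetical stand-in for the crate's `format_type_with_optional_length`
// helper: append `(len)` when a display width is given and ` UNSIGNED`
// when requested.
fn format_type_with_optional_length(
    f: &mut fmt::Formatter,
    sql_type: &str,
    len: &Option<u64>,
    unsigned: bool,
) -> fmt::Result {
    write!(f, "{sql_type}")?;
    if let Some(len) = len {
        write!(f, "({len})")?;
    }
    if unsigned {
        write!(f, " UNSIGNED")?;
    }
    Ok(())
}

// A toy type standing in for a variant like `DataType::UnsignedInt(zerofill)`.
struct UnsignedInt(Option<u64>);

impl fmt::Display for UnsignedInt {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        format_type_with_optional_length(f, "INT", &self.0, true)
    }
}

fn main() {
    assert_eq!(UnsignedInt(Some(11)).to_string(), "INT(11) UNSIGNED");
    assert_eq!(UnsignedInt(None).to_string(), "INT UNSIGNED");
}
```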
DataType::UTinyInt => { - write!(f, "UTINYINT") - } - DataType::USmallInt => { - write!(f, "USMALLINT") - } - DataType::UBigInt => { - write!(f, "UBIGINT") - } - DataType::UHugeInt => { - write!(f, "UHUGEINT") - } DataType::UInt8 => { write!(f, "UInt8") } @@ -594,23 +453,11 @@ impl fmt::Display for DataType { DataType::UInt256 => { write!(f, "UInt256") } - DataType::Signed => { - write!(f, "SIGNED") - } - DataType::SignedInteger => { - write!(f, "SIGNED INTEGER") - } - DataType::Unsigned => { - write!(f, "UNSIGNED") - } - DataType::UnsignedInteger => { - write!(f, "UNSIGNED INTEGER") - } DataType::Real => write!(f, "REAL"), DataType::Float4 => write!(f, "FLOAT4"), DataType::Float32 => write!(f, "Float32"), DataType::Float64 => write!(f, "FLOAT64"), - DataType::Double(info) => write!(f, "DOUBLE{info}"), + DataType::Double => write!(f, "DOUBLE"), DataType::Float8 => write!(f, "FLOAT8"), DataType::DoublePrecision => write!(f, "DOUBLE PRECISION"), DataType::Bool => write!(f, "BOOL"), @@ -626,7 +473,6 @@ impl fmt::Display for DataType { DataType::Timestamp(precision, timezone_info) => { format_datetime_precision_and_tz(f, "TIMESTAMP", precision, timezone_info) } - DataType::TimestampNtz => write!(f, "TIMESTAMP_NTZ"), DataType::Datetime64(precision, timezone) => { format_clickhouse_datetime_precision_and_timezone( f, @@ -640,16 +486,8 @@ impl fmt::Display for DataType { DataType::JSONB => write!(f, "JSONB"), DataType::Regclass => write!(f, "REGCLASS"), DataType::Text => write!(f, "TEXT"), - DataType::TinyText => write!(f, "TINYTEXT"), - DataType::MediumText => write!(f, "MEDIUMTEXT"), - DataType::LongText => write!(f, "LONGTEXT"), DataType::String(size) => format_type_with_optional_length(f, "STRING", size, false), DataType::Bytea => write!(f, "BYTEA"), - DataType::Bit(size) => format_type_with_optional_length(f, "BIT", size, false), - DataType::BitVarying(size) => { - format_type_with_optional_length(f, "BIT VARYING", size, false) - } - DataType::VarBit(size) => format_type_with_optional_length(f, "VARBIT", size, false), DataType::Array(ty) => match ty { ArrayElemTypeDef::None => write!(f, "ARRAY"), ArrayElemTypeDef::SquareBracket(t, None) => write!(f, "{t}[]"), @@ -664,24 +502,13 @@ impl fmt::Display for DataType { write!(f, "{}({})", ty, modifiers.join(", ")) } } - DataType::Enum(vals, bits) => { - match bits { - Some(bits) => write!(f, "ENUM{bits}"), - None => write!(f, "ENUM"), - }?; - write!(f, "(")?; + DataType::Enum(vals) => { + write!(f, "ENUM(")?; for (i, v) in vals.iter().enumerate() { if i != 0 { write!(f, ", ")?; } - match v { - EnumMember::Name(name) => { - write!(f, "'{}'", escape_single_quote_string(name))? - } - EnumMember::NamedValue(name, value) => { - write!(f, "'{}' = {}", escape_single_quote_string(name), value)? 
- } - } + write!(f, "'{}'", escape_single_quote_string(v))?; } write!(f, ")") } @@ -714,16 +541,16 @@ impl fmt::Display for DataType { } // ClickHouse DataType::Nullable(data_type) => { - write!(f, "Nullable({data_type})") + write!(f, "Nullable({})", data_type) } DataType::FixedString(character_length) => { - write!(f, "FixedString({character_length})") + write!(f, "FixedString({})", character_length) } DataType::LowCardinality(data_type) => { - write!(f, "LowCardinality({data_type})") + write!(f, "LowCardinality({})", data_type) } DataType::Map(key_data_type, value_data_type) => { - write!(f, "Map({key_data_type}, {value_data_type})") + write!(f, "Map({}, {})", key_data_type, value_data_type) } DataType::Tuple(fields) => { write!(f, "Tuple({})", display_comma_separated(fields)) @@ -733,21 +560,6 @@ impl fmt::Display for DataType { } DataType::Unspecified => Ok(()), DataType::Trigger => write!(f, "TRIGGER"), - DataType::AnyType => write!(f, "ANY TYPE"), - DataType::Table(fields) => match fields { - Some(fields) => { - write!(f, "TABLE({})", display_comma_separated(fields)) - } - None => { - write!(f, "TABLE") - } - }, - DataType::NamedTable { name, columns } => { - write!(f, "{} TABLE ({})", name, display_comma_separated(columns)) - } - DataType::GeometricType(kind) => write!(f, "{kind}"), - DataType::TsVector => write!(f, "TSVECTOR"), - DataType::TsQuery => write!(f, "TSQUERY"), } } } @@ -780,18 +592,6 @@ fn format_character_string_type( Ok(()) } -fn format_varbinary_type( - f: &mut fmt::Formatter, - sql_type: &str, - size: &Option, -) -> fmt::Result { - write!(f, "{sql_type}")?; - if let Some(size) = size { - write!(f, "({size})")?; - } - Ok(()) -} - fn format_datetime_precision_and_tz( f: &mut fmt::Formatter, sql_type: &'static str, @@ -849,19 +649,19 @@ pub enum StructBracketKind { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum TimezoneInfo { - /// No information about time zone, e.g. TIMESTAMP + /// No information about time zone. E.g., TIMESTAMP None, - /// Temporal type 'WITH TIME ZONE', e.g. TIMESTAMP WITH TIME ZONE, [SQL Standard], [Oracle] + /// Temporal type 'WITH TIME ZONE'. E.g., TIMESTAMP WITH TIME ZONE, [standard], [Oracle] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type /// [Oracle]: https://docs.oracle.com/en/database/oracle/oracle-database/12.2/nlspg/datetime-data-types-and-time-zone-support.html#GUID-3F1C388E-C651-43D5-ADBC-1A49E5C2CA05 WithTimeZone, - /// Temporal type 'WITHOUT TIME ZONE', e.g. TIME WITHOUT TIME ZONE, [SQL Standard], [Postgresql] + /// Temporal type 'WITHOUT TIME ZONE'. E.g., TIME WITHOUT TIME ZONE, [standard], [Postgresql] /// - /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type + /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type /// [Postgresql]: https://www.postgresql.org/docs/current/datatype-datetime.html WithoutTimeZone, - /// Postgresql specific `WITH TIME ZONE` formatting, for both TIME and TIMESTAMP, e.g. TIMETZ, [Postgresql] + /// Postgresql specific `WITH TIME ZONE` formatting, for both TIME and TIMESTAMP. 
E.g., TIMETZ, [Postgresql] /// /// [Postgresql]: https://www.postgresql.org/docs/current/datatype-datetime.html Tz, @@ -890,18 +690,18 @@ impl fmt::Display for TimezoneInfo { } /// Additional information for `NUMERIC`, `DECIMAL`, and `DEC` data types -/// following the 2016 [SQL Standard]. +/// following the 2016 [standard]. /// -/// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type +/// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum ExactNumberInfo { - /// No additional information, e.g. `DECIMAL` + /// No additional information e.g. `DECIMAL` None, - /// Only precision information, e.g. `DECIMAL(10)` + /// Only precision information e.g. `DECIMAL(10)` Precision(u64), - /// Precision and scale information, e.g. `DECIMAL(10,2)` + /// Precision and scale information e.g. `DECIMAL(10,2)` PrecisionAndScale(u64, u64), } @@ -942,7 +742,7 @@ impl fmt::Display for CharacterLength { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CharacterLength::IntegerLength { length, unit } => { - write!(f, "{length}")?; + write!(f, "{}", length)?; if let Some(unit) = unit { write!(f, " {unit}")?; } @@ -955,7 +755,7 @@ impl fmt::Display for CharacterLength { } } -/// Possible units for characters, initially based on 2016 ANSI [SQL Standard][1]. +/// Possible units for characters, initially based on 2016 ANSI [standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#char-length-units #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -981,32 +781,6 @@ impl fmt::Display for CharLengthUnits { } } -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum BinaryLength { - IntegerLength { - /// Default (if VARYING) - length: u64, - }, - /// VARBINARY(MAX) used in T-SQL (Microsoft SQL Server) - Max, -} - -impl fmt::Display for BinaryLength { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - BinaryLength::IntegerLength { length } => { - write!(f, "{length}")?; - } - BinaryLength::Max => { - write!(f, "MAX")?; - } - } - Ok(()) - } -} - /// Represents the data type of the elements in an array (if any) as well as /// the syntax used to declare the array. /// @@ -1024,34 +798,3 @@ pub enum ArrayElemTypeDef { /// `Array(Int64)` Parenthesis(Box), } - -/// Represents different types of geometric shapes which are commonly used in -/// PostgreSQL/Redshift for spatial operations and geometry-related computations. 
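The `Display` impl for `ExactNumberInfo` is elided from this hunk; based on the documented renderings `DECIMAL`, `DECIMAL(10)`, and `DECIMAL(10,2)` and the `NUMERIC{info}` arm earlier in the diff, a standalone sketch of how the suffix plausibly composes:

```rust
use std::fmt;

// Local mirror of the `ExactNumberInfo` variants listed above. The crate's
// own `Display` impl is not shown in this hunk, so the formatting below is
// an assumption derived from the documented examples.
enum ExactNumberInfo {
    None,
    Precision(u64),
    PrecisionAndScale(u64, u64),
}

impl fmt::Display for ExactNumberInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ExactNumberInfo::None => Ok(()),
            ExactNumberInfo::Precision(p) => write!(f, "({p})"),
            ExactNumberInfo::PrecisionAndScale(p, s) => write!(f, "({p},{s})"),
        }
    }
}

fn main() {
    // The `DataType::Numeric(info)` arm earlier in this diff renders as
    // `NUMERIC{info}`, so the suffix composes directly:
    assert_eq!(
        format!("NUMERIC{}", ExactNumberInfo::PrecisionAndScale(10, 2)),
        "NUMERIC(10,2)"
    );
    assert_eq!(format!("NUMERIC{}", ExactNumberInfo::None), "NUMERIC");
}
```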
-/// -/// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum GeometricTypeKind { - Point, - Line, - LineSegment, - GeometricBox, - GeometricPath, - Polygon, - Circle, -} - -impl fmt::Display for GeometricTypeKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - GeometricTypeKind::Point => write!(f, "point"), - GeometricTypeKind::Line => write!(f, "line"), - GeometricTypeKind::LineSegment => write!(f, "lseg"), - GeometricTypeKind::GeometricBox => write!(f, "box"), - GeometricTypeKind::GeometricPath => write!(f, "path"), - GeometricTypeKind::Polygon => write!(f, "polygon"), - GeometricTypeKind::Circle => write!(f, "circle"), - } - } -} diff --git a/src/ast/dcl.rs b/src/ast/dcl.rs index 07989407..d47476ff 100644 --- a/src/ast/dcl.rs +++ b/src/ast/dcl.rs @@ -28,7 +28,7 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use super::{display_comma_separated, Expr, Ident, Password}; +use super::{Expr, Ident, Password}; use crate::ast::{display_separated, ObjectName}; /// An option in `ROLE` statement. @@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation { in_database, } => { if let Some(database_name) = in_database { - write!(f, "IN DATABASE {database_name} ")?; + write!(f, "IN DATABASE {} ", database_name)?; } match config_value { @@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation { in_database, } => { if let Some(database_name) = in_database { - write!(f, "IN DATABASE {database_name} ")?; + write!(f, "IN DATABASE {} ", database_name)?; } match config_name { @@ -204,51 +204,24 @@ impl fmt::Display for AlterRoleOperation { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum Use { - Catalog(ObjectName), // e.g. `USE CATALOG foo.bar` - Schema(ObjectName), // e.g. `USE SCHEMA foo.bar` - Database(ObjectName), // e.g. `USE DATABASE foo.bar` - Warehouse(ObjectName), // e.g. `USE WAREHOUSE foo.bar` - Role(ObjectName), // e.g. `USE ROLE PUBLIC` - SecondaryRoles(SecondaryRoles), // e.g. `USE SECONDARY ROLES ALL` - Object(ObjectName), // e.g. `USE foo.bar` - Default, // e.g. `USE DEFAULT` + Catalog(ObjectName), // e.g. `USE CATALOG foo.bar` + Schema(ObjectName), // e.g. `USE SCHEMA foo.bar` + Database(ObjectName), // e.g. `USE DATABASE foo.bar` + Warehouse(ObjectName), // e.g. `USE WAREHOUSE foo.bar` + Object(ObjectName), // e.g. `USE foo.bar` + Default, // e.g. 
`USE DEFAULT` } impl fmt::Display for Use { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("USE ")?; match self { - Use::Catalog(name) => write!(f, "CATALOG {name}"), - Use::Schema(name) => write!(f, "SCHEMA {name}"), - Use::Database(name) => write!(f, "DATABASE {name}"), - Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"), - Use::Role(name) => write!(f, "ROLE {name}"), - Use::SecondaryRoles(secondary_roles) => { - write!(f, "SECONDARY ROLES {secondary_roles}") - } - Use::Object(name) => write!(f, "{name}"), + Use::Catalog(name) => write!(f, "CATALOG {}", name), + Use::Schema(name) => write!(f, "SCHEMA {}", name), + Use::Database(name) => write!(f, "DATABASE {}", name), + Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name), + Use::Object(name) => write!(f, "{}", name), Use::Default => write!(f, "DEFAULT"), } } } - -/// Snowflake `SECONDARY ROLES` USE variant -/// See: -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SecondaryRoles { - All, - None, - List(Vec), -} - -impl fmt::Display for SecondaryRoles { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SecondaryRoles::All => write!(f, "ALL"), - SecondaryRoles::None => write!(f, "NONE"), - SecondaryRoles::List(roles) => write!(f, "{}", display_comma_separated(roles)), - } - } -} diff --git a/src/ast/ddl.rs b/src/ast/ddl.rs index 51e05784..21a716d2 100644 --- a/src/ast/ddl.rs +++ b/src/ast/ddl.rs @@ -30,48 +30,19 @@ use sqlparser_derive::{Visit, VisitMut}; use crate::ast::value::escape_single_quote_string; use crate::ast::{ - display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody, - CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull, - FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition, - ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, - Value, ValueWithSpan, + display_comma_separated, display_separated, DataType, Expr, Ident, MySQLColumnPosition, + ObjectName, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value, }; use crate::keywords::Keyword; use crate::tokenizer::Token; -/// ALTER TABLE operation REPLICA IDENTITY values -/// See [Postgres ALTER TABLE docs](https://www.postgresql.org/docs/current/sql-altertable.html) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ReplicaIdentity { - None, - Full, - Default, - Index(Ident), -} - -impl fmt::Display for ReplicaIdentity { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ReplicaIdentity::None => f.write_str("NONE"), - ReplicaIdentity::Full => f.write_str("FULL"), - ReplicaIdentity::Default => f.write_str("DEFAULT"), - ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"), - } - } -} - /// An `ALTER TABLE` (`Statement::AlterTable`) operation #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum AlterTableOperation { - /// `ADD [NOT VALID]` - AddConstraint { - constraint: TableConstraint, - not_valid: bool, - }, + /// `ADD ` + AddConstraint(TableConstraint), /// `ADD [COLUMN] [IF NOT EXISTS] ` AddColumn { /// `[COLUMN]`. 
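The `USE` rendering in the dcl.rs hunk above writes the `USE ` prefix once and then a variant-specific tail. A minimal standalone mirror of that pattern, with `String` standing in for the crate's `ObjectName`:

```rust
use std::fmt;

// Simplified stand-in for the `Use` statement rendering shown above; only a
// few variants are reproduced, and `String` replaces `ObjectName`.
enum Use {
    Catalog(String),
    Schema(String),
    Object(String),
    Default,
}

impl fmt::Display for Use {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("USE ")?;
        match self {
            Use::Catalog(name) => write!(f, "CATALOG {name}"),
            Use::Schema(name) => write!(f, "SCHEMA {name}"),
            Use::Object(name) => write!(f, "{name}"),
            Use::Default => write!(f, "DEFAULT"),
        }
    }
}

fn main() {
    assert_eq!(Use::Catalog("foo.bar".into()).to_string(), "USE CATALOG foo.bar");
    assert_eq!(Use::Default.to_string(), "USE DEFAULT");
}
```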
@@ -92,14 +63,13 @@ pub enum AlterTableOperation { name: Ident, select: ProjectionSelect, }, + /// `DROP PROJECTION [IF EXISTS] name` /// /// Note: this is a ClickHouse-specific operation. /// Please refer to [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/projection#drop-projection) - DropProjection { - if_exists: bool, - name: Ident, - }, + DropProjection { if_exists: bool, name: Ident }, + /// `MATERIALIZE PROJECTION [IF EXISTS] name [IN PARTITION partition_name]` /// /// Note: this is a ClickHouse-specific operation. @@ -109,6 +79,7 @@ pub enum AlterTableOperation { name: Ident, partition: Option, }, + /// `CLEAR PROJECTION [IF EXISTS] name [IN PARTITION partition_name]` /// /// Note: this is a ClickHouse-specific operation. @@ -118,6 +89,7 @@ pub enum AlterTableOperation { name: Ident, partition: Option, }, + /// `DISABLE ROW LEVEL SECURITY` /// /// Note: this is a PostgreSQL-specific operation. @@ -125,27 +97,22 @@ pub enum AlterTableOperation { /// `DISABLE RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - DisableRule { - name: Ident, - }, + DisableRule { name: Ident }, /// `DISABLE TRIGGER [ trigger_name | ALL | USER ]` /// /// Note: this is a PostgreSQL-specific operation. - DisableTrigger { - name: Ident, - }, + DisableTrigger { name: Ident }, /// `DROP CONSTRAINT [ IF EXISTS ] ` DropConstraint { if_exists: bool, name: Ident, - drop_behavior: Option, + cascade: bool, }, - /// `DROP [ COLUMN ] [ IF EXISTS ] [ , , ... ] [ CASCADE ]` + /// `DROP [ COLUMN ] [ IF EXISTS ] [ CASCADE ]` DropColumn { - has_column_keyword: bool, - column_names: Vec, + column_name: Ident, if_exists: bool, - drop_behavior: Option, + cascade: bool, }, /// `ATTACH PART|PARTITION ` /// Note: this is a ClickHouse-specific operation, please refer to @@ -178,48 +145,24 @@ pub enum AlterTableOperation { }, /// `DROP PRIMARY KEY` /// - /// Note: this is a [MySQL]-specific operation. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html + /// Note: this is a MySQL-specific operation. DropPrimaryKey, - /// `DROP FOREIGN KEY ` - /// - /// Note: this is a [MySQL]-specific operation. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html - DropForeignKey { - name: Ident, - }, - /// `DROP INDEX ` - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html - DropIndex { - name: Ident, - }, /// `ENABLE ALWAYS RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableAlwaysRule { - name: Ident, - }, + EnableAlwaysRule { name: Ident }, /// `ENABLE ALWAYS TRIGGER trigger_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableAlwaysTrigger { - name: Ident, - }, + EnableAlwaysTrigger { name: Ident }, /// `ENABLE REPLICA RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableReplicaRule { - name: Ident, - }, + EnableReplicaRule { name: Ident }, /// `ENABLE REPLICA TRIGGER trigger_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableReplicaTrigger { - name: Ident, - }, + EnableReplicaTrigger { name: Ident }, /// `ENABLE ROW LEVEL SECURITY` /// /// Note: this is a PostgreSQL-specific operation. @@ -227,27 +170,16 @@ pub enum AlterTableOperation { /// `ENABLE RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableRule { - name: Ident, - }, + EnableRule { name: Ident }, /// `ENABLE TRIGGER [ trigger_name | ALL | USER ]` /// /// Note: this is a PostgreSQL-specific operation. 
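The `DropConstraint` and `DropColumn` hunks above swap the three-valued `drop_behavior: Option<DropBehavior>` field back to a plain `cascade: bool`. The removed display arms further down in this diff map the option form to an empty suffix, ` RESTRICT`, or ` CASCADE`; a standalone sketch of that mapping (the `drop_behavior_suffix` helper name is invented here):

```rust
// Sketch of the suffix mapping used by the removed `DROP CONSTRAINT` /
// `DROP COLUMN` display arms later in this diff. Standalone code, not the crate's.
#[derive(Clone, Copy)]
enum DropBehavior {
    Restrict,
    Cascade,
}

fn drop_behavior_suffix(behavior: Option<DropBehavior>) -> &'static str {
    match behavior {
        None => "",
        Some(DropBehavior::Restrict) => " RESTRICT",
        Some(DropBehavior::Cascade) => " CASCADE",
    }
}

fn main() {
    assert_eq!(
        format!(
            "DROP CONSTRAINT fk_order_customer{}",
            drop_behavior_suffix(Some(DropBehavior::Cascade))
        ),
        "DROP CONSTRAINT fk_order_customer CASCADE"
    );
    assert_eq!(
        format!("DROP COLUMN price{}", drop_behavior_suffix(None)),
        "DROP COLUMN price"
    );
}
```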
- EnableTrigger { - name: Ident, - }, + EnableTrigger { name: Ident }, /// `RENAME TO PARTITION (partition=val)` RenamePartitions { old_partitions: Vec, new_partitions: Vec, }, - /// REPLICA IDENTITY { DEFAULT | USING INDEX index_name | FULL | NOTHING } - /// - /// Note: this is a PostgreSQL-specific operation. - /// Please refer to [PostgreSQL documentation](https://www.postgresql.org/docs/current/sql-altertable.html) - ReplicaIdentity { - identity: ReplicaIdentity, - }, /// Add Partitions AddPartitions { if_not_exists: bool, @@ -263,9 +195,7 @@ pub enum AlterTableOperation { new_column_name: Ident, }, /// `RENAME TO ` - RenameTable { - table_name: ObjectName, - }, + RenameTable { table_name: ObjectName }, // CHANGE [ COLUMN ] [ ] ChangeColumn { old_name: Ident, @@ -286,10 +216,7 @@ pub enum AlterTableOperation { /// `RENAME CONSTRAINT TO ` /// /// Note: this is a PostgreSQL-specific operation. - RenameConstraint { - old_name: Ident, - new_name: Ident, - }, + RenameConstraint { old_name: Ident, new_name: Ident }, /// `ALTER [ COLUMN ]` AlterColumn { column_name: Ident, @@ -298,59 +225,14 @@ pub enum AlterTableOperation { /// 'SWAP WITH ' /// /// Note: this is Snowflake specific - SwapWith { - table_name: ObjectName, - }, + SwapWith { table_name: ObjectName }, /// 'SET TBLPROPERTIES ( { property_key [ = ] property_val } [, ...] )' - SetTblProperties { - table_properties: Vec, - }, + SetTblProperties { table_properties: Vec }, + /// `OWNER TO { | CURRENT_ROLE | CURRENT_USER | SESSION_USER }` /// /// Note: this is PostgreSQL-specific - OwnerTo { - new_owner: Owner, - }, - /// Snowflake table clustering options - /// - ClusterBy { - exprs: Vec, - }, - DropClusteringKey, - SuspendRecluster, - ResumeRecluster, - /// `ALGORITHM [=] { DEFAULT | INSTANT | INPLACE | COPY }` - /// - /// [MySQL]-specific table alter algorithm. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html - Algorithm { - equals: bool, - algorithm: AlterTableAlgorithm, - }, - - /// `LOCK [=] { DEFAULT | NONE | SHARED | EXCLUSIVE }` - /// - /// [MySQL]-specific table alter lock. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html - Lock { - equals: bool, - lock: AlterTableLock, - }, - /// `AUTO_INCREMENT [=] ` - /// - /// [MySQL]-specific table option for raising current auto increment value. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html - AutoIncrement { - equals: bool, - value: ValueWithSpan, - }, - /// `VALIDATE CONSTRAINT ` - ValidateConstraint { - name: Ident, - }, + OwnerTo { new_owner: Owner }, } /// An `ALTER Policy` (`Statement::AlterPolicy`) operation @@ -396,54 +278,6 @@ impl fmt::Display for AlterPolicyOperation { } } -/// [MySQL] `ALTER TABLE` algorithm. -/// -/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AlterTableAlgorithm { - Default, - Instant, - Inplace, - Copy, -} - -impl fmt::Display for AlterTableAlgorithm { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - Self::Default => "DEFAULT", - Self::Instant => "INSTANT", - Self::Inplace => "INPLACE", - Self::Copy => "COPY", - }) - } -} - -/// [MySQL] `ALTER TABLE` lock. 
-/// -/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AlterTableLock { - Default, - None, - Shared, - Exclusive, -} - -impl fmt::Display for AlterTableLock { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - Self::Default => "DEFAULT", - Self::None => "NONE", - Self::Shared => "SHARED", - Self::Exclusive => "EXCLUSIVE", - }) - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -457,7 +291,7 @@ pub enum Owner { impl fmt::Display for Owner { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Owner::Ident(ident) => write!(f, "{ident}"), + Owner::Ident(ident) => write!(f, "{}", ident), Owner::CurrentRole => write!(f, "CURRENT_ROLE"), Owner::CurrentUser => write!(f, "CURRENT_USER"), Owner::SessionUser => write!(f, "SESSION_USER"), @@ -465,23 +299,6 @@ impl fmt::Display for Owner { } } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AlterConnectorOwner { - User(Ident), - Role(Ident), -} - -impl fmt::Display for AlterConnectorOwner { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - AlterConnectorOwner::User(ident) => write!(f, "USER {ident}"), - AlterConnectorOwner::Role(ident) => write!(f, "ROLE {ident}"), - } - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -501,16 +318,7 @@ impl fmt::Display for AlterTableOperation { display_separated(new_partitions, " "), ine = if *if_not_exists { " IF NOT EXISTS" } else { "" } ), - AlterTableOperation::AddConstraint { - not_valid, - constraint, - } => { - write!(f, "ADD {constraint}")?; - if *not_valid { - write!(f, " NOT VALID")?; - } - Ok(()) - } + AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"), AlterTableOperation::AddColumn { column_keyword, if_not_exists, @@ -541,22 +349,14 @@ impl fmt::Display for AlterTableOperation { if *if_not_exists { write!(f, " IF NOT EXISTS")?; } - write!(f, " {name} ({query})") - } - AlterTableOperation::Algorithm { equals, algorithm } => { - write!( - f, - "ALGORITHM {}{}", - if *equals { "= " } else { "" }, - algorithm - ) + write!(f, " {} ({})", name, query) } AlterTableOperation::DropProjection { if_exists, name } => { write!(f, "DROP PROJECTION")?; if *if_exists { write!(f, " IF EXISTS")?; } - write!(f, " {name}") + write!(f, " {}", name) } AlterTableOperation::MaterializeProjection { if_exists, @@ -567,9 +367,9 @@ impl fmt::Display for AlterTableOperation { if *if_exists { write!(f, " IF EXISTS")?; } - write!(f, " {name}")?; + write!(f, " {}", name)?; if let Some(partition) = partition { - write!(f, " IN PARTITION {partition}")?; + write!(f, " IN PARTITION {}", partition)?; } Ok(()) } @@ -582,9 +382,9 @@ impl fmt::Display for AlterTableOperation { if *if_exists { write!(f, " IF EXISTS")?; } - write!(f, " {name}")?; + write!(f, " {}", name)?; if let Some(partition) = partition { - write!(f, " IN PARTITION {partition}")?; + write!(f, " IN PARTITION {}", partition)?; } Ok(()) } @@ -612,39 
+412,27 @@ impl fmt::Display for AlterTableOperation { AlterTableOperation::DropConstraint { if_exists, name, - drop_behavior, + cascade, } => { write!( f, "DROP CONSTRAINT {}{}{}", if *if_exists { "IF EXISTS " } else { "" }, name, - match drop_behavior { - None => "", - Some(DropBehavior::Restrict) => " RESTRICT", - Some(DropBehavior::Cascade) => " CASCADE", - } + if *cascade { " CASCADE" } else { "" }, ) } AlterTableOperation::DropPrimaryKey => write!(f, "DROP PRIMARY KEY"), - AlterTableOperation::DropForeignKey { name } => write!(f, "DROP FOREIGN KEY {name}"), - AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"), AlterTableOperation::DropColumn { - has_column_keyword, - column_names: column_name, + column_name, if_exists, - drop_behavior, + cascade, } => write!( f, - "DROP {}{}{}{}", - if *has_column_keyword { "COLUMN " } else { "" }, + "DROP COLUMN {}{}{}", if *if_exists { "IF EXISTS " } else { "" }, - display_comma_separated(column_name), - match drop_behavior { - None => "", - Some(DropBehavior::Restrict) => " RESTRICT", - Some(DropBehavior::Cascade) => " CASCADE", - } + column_name, + if *cascade { " CASCADE" } else { "" } ), AlterTableOperation::AttachPartition { partition } => { write!(f, "ATTACH {partition}") @@ -758,39 +546,6 @@ impl fmt::Display for AlterTableOperation { } Ok(()) } - AlterTableOperation::ClusterBy { exprs } => { - write!(f, "CLUSTER BY ({})", display_comma_separated(exprs))?; - Ok(()) - } - AlterTableOperation::DropClusteringKey => { - write!(f, "DROP CLUSTERING KEY")?; - Ok(()) - } - AlterTableOperation::SuspendRecluster => { - write!(f, "SUSPEND RECLUSTER")?; - Ok(()) - } - AlterTableOperation::ResumeRecluster => { - write!(f, "RESUME RECLUSTER")?; - Ok(()) - } - AlterTableOperation::AutoIncrement { equals, value } => { - write!( - f, - "AUTO_INCREMENT {}{}", - if *equals { "= " } else { "" }, - value - ) - } - AlterTableOperation::Lock { equals, lock } => { - write!(f, "LOCK {}{}", if *equals { "= " } else { "" }, lock) - } - AlterTableOperation::ReplicaIdentity { identity } => { - write!(f, "REPLICA IDENTITY {identity}") - } - AlterTableOperation::ValidateConstraint { name } => { - write!(f, "VALIDATE CONSTRAINT {name}") - } } } } @@ -805,95 +560,6 @@ impl fmt::Display for AlterIndexOperation { } } -/// An `ALTER TYPE` statement (`Statement::AlterType`) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AlterType { - pub name: ObjectName, - pub operation: AlterTypeOperation, -} - -/// An [AlterType] operation -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AlterTypeOperation { - Rename(AlterTypeRename), - AddValue(AlterTypeAddValue), - RenameValue(AlterTypeRenameValue), -} - -/// See [AlterTypeOperation::Rename] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AlterTypeRename { - pub new_name: Ident, -} - -/// See [AlterTypeOperation::AddValue] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AlterTypeAddValue { - pub if_not_exists: bool, - pub value: 
Ident, - pub position: Option, -} - -/// See [AlterTypeAddValue] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AlterTypeAddValuePosition { - Before(Ident), - After(Ident), -} - -/// See [AlterTypeOperation::RenameValue] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AlterTypeRenameValue { - pub from: Ident, - pub to: Ident, -} - -impl fmt::Display for AlterTypeOperation { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::Rename(AlterTypeRename { new_name }) => { - write!(f, "RENAME TO {new_name}") - } - Self::AddValue(AlterTypeAddValue { - if_not_exists, - value, - position, - }) => { - write!(f, "ADD VALUE")?; - if *if_not_exists { - write!(f, " IF NOT EXISTS")?; - } - write!(f, " {value}")?; - match position { - Some(AlterTypeAddValuePosition::Before(neighbor_value)) => { - write!(f, " BEFORE {neighbor_value}")?; - } - Some(AlterTypeAddValuePosition::After(neighbor_value)) => { - write!(f, " AFTER {neighbor_value}")?; - } - None => {} - }; - Ok(()) - } - Self::RenameValue(AlterTypeRenameValue { from, to }) => { - write!(f, "RENAME VALUE {from} TO {to}") - } - } - } -} - /// An `ALTER COLUMN` (`Statement::AlterTable`) operation #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -912,10 +578,7 @@ pub enum AlterColumnOperation { data_type: DataType, /// PostgreSQL specific using: Option, - /// Set to true if the statement includes the `SET DATA TYPE` keywords - had_set: bool, }, - /// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]` /// /// Note: this is a PostgreSQL-specific operation. @@ -933,22 +596,15 @@ impl fmt::Display for AlterColumnOperation { AlterColumnOperation::SetDefault { value } => { write!(f, "SET DEFAULT {value}") } - AlterColumnOperation::DropDefault => { + AlterColumnOperation::DropDefault {} => { write!(f, "DROP DEFAULT") } - AlterColumnOperation::SetDataType { - data_type, - using, - had_set, - } => { - if *had_set { - write!(f, "SET DATA ")?; - } - write!(f, "TYPE {data_type}")?; + AlterColumnOperation::SetDataType { data_type, using } => { if let Some(expr) = using { - write!(f, " USING {expr}")?; + write!(f, "SET DATA TYPE {data_type} USING {expr}") + } else { + write!(f, "SET DATA TYPE {data_type}") } - Ok(()) } AlterColumnOperation::AddGenerated { generated_as, @@ -1008,11 +664,9 @@ pub enum TableConstraint { /// [1]: IndexType index_type: Option, /// Identifiers of the columns that are unique. - columns: Vec, + columns: Vec, index_options: Vec, characteristics: Option, - /// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]` - nulls_distinct: NullsDistinctOption, }, /// MySQL [definition][1] for `PRIMARY KEY` constraints statements:\ /// * `[CONSTRAINT []] PRIMARY KEY [index_name] [index_type] () ` @@ -1044,7 +698,7 @@ pub enum TableConstraint { /// [1]: IndexType index_type: Option, /// Identifiers of the columns that form the primary key. - columns: Vec, + columns: Vec, index_options: Vec, characteristics: Option, }, @@ -1055,9 +709,6 @@ pub enum TableConstraint { /// }`). 
ForeignKey { name: Option, - /// MySQL-specific field - /// - index_name: Option, columns: Vec, foreign_table: ObjectName, referred_columns: Vec, @@ -1065,13 +716,10 @@ pub enum TableConstraint { on_update: Option, characteristics: Option, }, - /// `[ CONSTRAINT ] CHECK () [[NOT] ENFORCED]` + /// `[ CONSTRAINT ] CHECK ()` Check { name: Option, expr: Box, - /// MySQL-specific syntax - /// - enforced: Option, }, /// MySQLs [index definition][1] for index creation. Not present on ANSI so, for now, the usage /// is restricted to MySQL, as no other dialects that support this syntax were found. @@ -1089,7 +737,7 @@ pub enum TableConstraint { /// [1]: IndexType index_type: Option, /// Referred column identifier list. - columns: Vec, + columns: Vec, }, /// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same, /// and MySQL displays both the same way, it is part of this definition as well. @@ -1112,7 +760,7 @@ pub enum TableConstraint { /// Optional index name. opt_index_name: Option, /// Referred column identifier list. - columns: Vec, + columns: Vec, }, } @@ -1127,11 +775,10 @@ impl fmt::Display for TableConstraint { columns, index_options, characteristics, - nulls_distinct, } => { write!( f, - "{}UNIQUE{nulls_distinct}{index_type_display:>}{}{} ({})", + "{}UNIQUE{index_type_display:>}{}{} ({})", display_constraint_name(name), display_option_spaced(index_name), display_option(" USING ", "", index_type), @@ -1171,7 +818,6 @@ impl fmt::Display for TableConstraint { } TableConstraint::ForeignKey { name, - index_name, columns, foreign_table, referred_columns, @@ -1181,15 +827,12 @@ impl fmt::Display for TableConstraint { } => { write!( f, - "{}FOREIGN KEY{} ({}) REFERENCES {}", + "{}FOREIGN KEY ({}) REFERENCES {}({})", display_constraint_name(name), - display_option_spaced(index_name), display_comma_separated(columns), foreign_table, + display_comma_separated(referred_columns), )?; - if !referred_columns.is_empty() { - write!(f, "({})", display_comma_separated(referred_columns))?; - } if let Some(action) = on_delete { write!(f, " ON DELETE {action}")?; } @@ -1197,21 +840,12 @@ impl fmt::Display for TableConstraint { write!(f, " ON UPDATE {action}")?; } if let Some(characteristics) = characteristics { - write!(f, " {characteristics}")?; + write!(f, " {}", characteristics)?; } Ok(()) } - TableConstraint::Check { - name, - expr, - enforced, - } => { - write!(f, "{}CHECK ({})", display_constraint_name(name), expr)?; - if let Some(b) = enforced { - write!(f, " {}", if *b { "ENFORCED" } else { "NOT ENFORCED" }) - } else { - Ok(()) - } + TableConstraint::Check { name, expr } => { + write!(f, "{}CHECK ({})", display_constraint_name(name), expr) } TableConstraint::Index { display_as_key, @@ -1311,20 +945,13 @@ impl fmt::Display for KeyOrIndexDisplay { /// [1]: https://dev.mysql.com/doc/refman/8.0/en/create-table.html /// [2]: https://dev.mysql.com/doc/refman/8.0/en/create-index.html /// [3]: https://www.postgresql.org/docs/14/sql-createindex.html -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum IndexType { BTree, Hash, - GIN, - GiST, - SPGiST, - BRIN, - Bloom, - /// Users may define their own index types, which would - /// not be covered by the above variants. 
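The `CHECK` arm above prefixes the constraint body with `display_constraint_name`, which emits `CONSTRAINT <name> ` only when a name is present. A standalone sketch of that prefixing pattern (the `constraint_name_prefix` helper is hypothetical, not the crate's):

```rust
// Standalone sketch of the optional `CONSTRAINT <name> ` prefix used by the
// `TableConstraint` display arms in this diff.
fn constraint_name_prefix(name: Option<&str>) -> String {
    match name {
        Some(name) => format!("CONSTRAINT {name} "),
        None => String::new(),
    }
}

fn main() {
    assert_eq!(
        format!("{}CHECK (price > 0)", constraint_name_prefix(Some("positive_price"))),
        "CONSTRAINT positive_price CHECK (price > 0)"
    );
    assert_eq!(
        format!("{}CHECK (price > 0)", constraint_name_prefix(None)),
        "CHECK (price > 0)"
    );
}
```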
- Custom(Ident), + // TODO add Postgresql's possible indexes } impl fmt::Display for IndexType { @@ -1332,12 +959,6 @@ impl fmt::Display for IndexType { match self { Self::BTree => write!(f, "BTREE"), Self::Hash => write!(f, "HASH"), - Self::GIN => write!(f, "GIN"), - Self::GiST => write!(f, "GIST"), - Self::SPGiST => write!(f, "SPGIST"), - Self::BRIN => write!(f, "BRIN"), - Self::Bloom => write!(f, "BLOOM"), - Self::Custom(name) => write!(f, "{name}"), } } } @@ -1365,47 +986,17 @@ impl fmt::Display for IndexOption { } } -/// [PostgreSQL] unique index nulls handling option: `[ NULLS [ NOT ] DISTINCT ]` -/// -/// [PostgreSQL]: https://www.postgresql.org/docs/17/sql-altertable.html -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum NullsDistinctOption { - /// Not specified - None, - /// NULLS DISTINCT - Distinct, - /// NULLS NOT DISTINCT - NotDistinct, -} - -impl fmt::Display for NullsDistinctOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::None => Ok(()), - Self::Distinct => write!(f, " NULLS DISTINCT"), - Self::NotDistinct => write!(f, " NULLS NOT DISTINCT"), - } - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct ProcedureParam { pub name: Ident, pub data_type: DataType, - pub mode: Option, } impl fmt::Display for ProcedureParam { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if let Some(mode) = &self.mode { - write!(f, "{mode} {} {}", self.name, self.data_type) - } else { - write!(f, "{} {}", self.name, self.data_type) - } + write!(f, "{} {}", self.name, self.data_type) } } @@ -1416,6 +1007,7 @@ impl fmt::Display for ProcedureParam { pub struct ColumnDef { pub name: Ident, pub data_type: DataType, + pub collation: Option, pub options: Vec, } @@ -1426,6 +1018,9 @@ impl fmt::Display for ColumnDef { } else { write!(f, "{} {}", self.name, self.data_type)?; } + if let Some(collation) = &self.collation { + write!(f, " COLLATE {collation}")?; + } for option in &self.options { write!(f, " {option}")?; } @@ -1455,41 +1050,17 @@ impl fmt::Display for ColumnDef { pub struct ViewColumnDef { pub name: Ident, pub data_type: Option, - pub options: Option, -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ColumnOptions { - CommaSeparated(Vec), - SpaceSeparated(Vec), -} - -impl ColumnOptions { - pub fn as_slice(&self) -> &[ColumnOption] { - match self { - ColumnOptions::CommaSeparated(options) => options.as_slice(), - ColumnOptions::SpaceSeparated(options) => options.as_slice(), - } - } + pub options: Option>, } impl fmt::Display for ViewColumnDef { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name)?; if let Some(data_type) = self.data_type.as_ref() { - write!(f, " {data_type}")?; + write!(f, " {}", data_type)?; } if let Some(options) = self.options.as_ref() { - match options { - ColumnOptions::CommaSeparated(column_options) => { - write!(f, " {}", display_comma_separated(column_options.as_slice()))?; - } - ColumnOptions::SpaceSeparated(column_options) => { - write!(f, " {}", display_separated(column_options.as_slice(), " "))? 
- } - } + write!(f, " {}", display_comma_separated(options.as_slice()))?; } Ok(()) } @@ -1709,7 +1280,7 @@ pub struct ColumnPolicyProperty { /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table pub with: bool, - pub policy_name: ObjectName, + pub policy_name: Ident, pub using_columns: Option>, } @@ -1756,18 +1327,15 @@ pub enum ColumnOption { /// `DEFAULT ` Default(Expr), - /// `MATERIALIZE ` + /// ClickHouse supports `MATERIALIZE`, `EPHEMERAL` and `ALIAS` expr to generate default values. /// Syntax: `b INT MATERIALIZE (a + 1)` - /// /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) + + /// `MATERIALIZE ` Materialized(Expr), /// `EPHEMERAL []` - /// - /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) Ephemeral(Option), /// `ALIAS ` - /// - /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) Alias(Expr), /// `{ PRIMARY KEY | UNIQUE } []` @@ -1796,7 +1364,6 @@ pub enum ColumnOption { /// - ... DialectSpecific(Vec), CharacterSet(ObjectName), - Collation(ObjectName), Comment(String), OnUpdate(Expr), /// `Generated`s are modifiers that follow a column definition in a `CREATE @@ -1843,13 +1410,6 @@ pub enum ColumnOption { /// ``` /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table Tags(TagsColumnOption), - /// MySQL specific: Spatial reference identifier - /// Syntax: - /// ```sql - /// CREATE TABLE geom (g GEOMETRY NOT NULL SRID 4326); - /// ``` - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/creating-spatial-indexes.html - Srid(Box), } impl fmt::Display for ColumnOption { @@ -1874,7 +1434,7 @@ impl fmt::Display for ColumnOption { } => { write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?; if let Some(characteristics) = characteristics { - write!(f, " {characteristics}")?; + write!(f, " {}", characteristics)?; } Ok(()) } @@ -1896,14 +1456,13 @@ impl fmt::Display for ColumnOption { write!(f, " ON UPDATE {action}")?; } if let Some(characteristics) = characteristics { - write!(f, " {characteristics}")?; + write!(f, " {}", characteristics)?; } Ok(()) } Check(expr) => write!(f, "CHECK ({expr})"), DialectSpecific(val) => write!(f, "{}", display_separated(val, " ")), CharacterSet(n) => write!(f, "CHARACTER SET {n}"), - Collation(n) => write!(f, "COLLATE {n}"), Comment(v) => write!(f, "COMMENT '{}'", escape_single_quote_string(v)), OnUpdate(expr) => write!(f, "ON UPDATE {expr}"), Generated { @@ -1956,7 +1515,7 @@ impl fmt::Display for ColumnOption { write!(f, "{parameters}") } OnConflict(keyword) => { - write!(f, "ON CONFLICT {keyword:?}")?; + write!(f, "ON CONFLICT {:?}", keyword)?; Ok(()) } Policy(parameters) => { @@ -1965,9 +1524,6 @@ impl fmt::Display for ColumnOption { Tags(tags) => { write!(f, "{tags}") } - Srid(srid) => { - write!(f, "SRID {srid}") - } } } } @@ -1996,7 +1552,7 @@ pub enum GeneratedExpressionMode { #[must_use] fn display_constraint_name(name: &'_ Option) -> impl fmt::Display + '_ { struct ConstraintName<'a>(&'a Option); - impl fmt::Display for ConstraintName<'_> { + impl<'a> fmt::Display for ConstraintName<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(name) = self.0 { write!(f, "CONSTRAINT {name} ")?; @@ -2017,7 +1573,7 @@ fn display_option<'a, T: fmt::Display>( option: &'a Option, ) -> impl fmt::Display + 'a { struct OptionDisplay<'a, T>(&'a str, &'a str, &'a Option); - impl fmt::Display for OptionDisplay<'_, T> { + 
impl<'a, T: fmt::Display> fmt::Display for OptionDisplay<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(inner) = self.2 { let (prefix, postfix) = (self.0, self.1); @@ -2142,26 +1698,6 @@ impl fmt::Display for ReferentialAction { } } -/// ` ::= CASCADE | RESTRICT`. -/// -/// Used in `DROP` statements. -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum DropBehavior { - Restrict, - Cascade, -} - -impl fmt::Display for DropBehavior { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - DropBehavior::Restrict => "RESTRICT", - DropBehavior::Cascade => "CASCADE", - }) - } -} - /// SQL user defined type definition #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -2280,247 +1816,3 @@ impl fmt::Display for ClusteredBy { write!(f, " INTO {} BUCKETS", self.num_buckets) } } - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// ```sql -/// CREATE DOMAIN name [ AS ] data_type -/// [ COLLATE collation ] -/// [ DEFAULT expression ] -/// [ domain_constraint [ ... ] ] -/// -/// where domain_constraint is: -/// -/// [ CONSTRAINT constraint_name ] -/// { NOT NULL | NULL | CHECK (expression) } -/// ``` -/// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createdomain.html) -pub struct CreateDomain { - /// The name of the domain to be created. - pub name: ObjectName, - /// The data type of the domain. - pub data_type: DataType, - /// The collation of the domain. - pub collation: Option, - /// The default value of the domain. - pub default: Option, - /// The constraints of the domain. - pub constraints: Vec, -} - -impl fmt::Display for CreateDomain { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "CREATE DOMAIN {name} AS {data_type}", - name = self.name, - data_type = self.data_type - )?; - if let Some(collation) = &self.collation { - write!(f, " COLLATE {collation}")?; - } - if let Some(default) = &self.default { - write!(f, " DEFAULT {default}")?; - } - if !self.constraints.is_empty() { - write!(f, " {}", display_separated(&self.constraints, " "))?; - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CreateFunction { - /// True if this is a `CREATE OR ALTER FUNCTION` statement - /// - /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#or-alter) - pub or_alter: bool, - pub or_replace: bool, - pub temporary: bool, - pub if_not_exists: bool, - pub name: ObjectName, - pub args: Option>, - pub return_type: Option, - /// The expression that defines the function. 
- /// - /// Examples: - /// ```sql - /// AS ((SELECT 1)) - /// AS "console.log();" - /// ``` - pub function_body: Option, - /// Behavior attribute for the function - /// - /// IMMUTABLE | STABLE | VOLATILE - /// - /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) - pub behavior: Option, - /// CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT - /// - /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) - pub called_on_null: Option, - /// PARALLEL { UNSAFE | RESTRICTED | SAFE } - /// - /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) - pub parallel: Option, - /// USING ... (Hive only) - pub using: Option, - /// Language used in a UDF definition. - /// - /// Example: - /// ```sql - /// CREATE FUNCTION foo() LANGUAGE js AS "console.log();" - /// ``` - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_javascript_udf) - pub language: Option, - /// Determinism keyword used for non-sql UDF definitions. - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) - pub determinism_specifier: Option, - /// List of options for creating the function. - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) - pub options: Option>, - /// Connection resource for a remote function. - /// - /// Example: - /// ```sql - /// CREATE FUNCTION foo() - /// RETURNS FLOAT64 - /// REMOTE WITH CONNECTION us.myconnection - /// ``` - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_remote_function) - pub remote_connection: Option, -} - -impl fmt::Display for CreateFunction { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "CREATE {or_alter}{or_replace}{temp}FUNCTION {if_not_exists}{name}", - name = self.name, - temp = if self.temporary { "TEMPORARY " } else { "" }, - or_alter = if self.or_alter { "OR ALTER " } else { "" }, - or_replace = if self.or_replace { "OR REPLACE " } else { "" }, - if_not_exists = if self.if_not_exists { - "IF NOT EXISTS " - } else { - "" - }, - )?; - if let Some(args) = &self.args { - write!(f, "({})", display_comma_separated(args))?; - } - if let Some(return_type) = &self.return_type { - write!(f, " RETURNS {return_type}")?; - } - if let Some(determinism_specifier) = &self.determinism_specifier { - write!(f, " {determinism_specifier}")?; - } - if let Some(language) = &self.language { - write!(f, " LANGUAGE {language}")?; - } - if let Some(behavior) = &self.behavior { - write!(f, " {behavior}")?; - } - if let Some(called_on_null) = &self.called_on_null { - write!(f, " {called_on_null}")?; - } - if let Some(parallel) = &self.parallel { - write!(f, " {parallel}")?; - } - if let Some(remote_connection) = &self.remote_connection { - write!(f, " REMOTE WITH CONNECTION {remote_connection}")?; - } - if let Some(CreateFunctionBody::AsBeforeOptions(function_body)) = &self.function_body { - write!(f, " AS {function_body}")?; - } - if let Some(CreateFunctionBody::Return(function_body)) = &self.function_body { - write!(f, " RETURN {function_body}")?; - } - if let Some(CreateFunctionBody::AsReturnExpr(function_body)) = &self.function_body { - write!(f, " AS RETURN {function_body}")?; - } - if let Some(CreateFunctionBody::AsReturnSelect(function_body)) = &self.function_body { - write!(f, " AS RETURN {function_body}")?; - } - if let 
Some(using) = &self.using { - write!(f, " {using}")?; - } - if let Some(options) = &self.options { - write!( - f, - " OPTIONS({})", - display_comma_separated(options.as_slice()) - )?; - } - if let Some(CreateFunctionBody::AsAfterOptions(function_body)) = &self.function_body { - write!(f, " AS {function_body}")?; - } - if let Some(CreateFunctionBody::AsBeginEnd(bes)) = &self.function_body { - write!(f, " AS {bes}")?; - } - Ok(()) - } -} - -/// ```sql -/// CREATE CONNECTOR [IF NOT EXISTS] connector_name -/// [TYPE datasource_type] -/// [URL datasource_url] -/// [COMMENT connector_comment] -/// [WITH DCPROPERTIES(property_name=property_value, ...)] -/// ``` -/// -/// [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CreateConnector { - pub name: Ident, - pub if_not_exists: bool, - pub connector_type: Option, - pub url: Option, - pub comment: Option, - pub with_dcproperties: Option>, -} - -impl fmt::Display for CreateConnector { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "CREATE CONNECTOR {if_not_exists}{name}", - if_not_exists = if self.if_not_exists { - "IF NOT EXISTS " - } else { - "" - }, - name = self.name, - )?; - - if let Some(connector_type) = &self.connector_type { - write!(f, " TYPE '{connector_type}'")?; - } - - if let Some(url) = &self.url { - write!(f, " URL '{url}'")?; - } - - if let Some(comment) = &self.comment { - write!(f, " COMMENT = '{comment}'")?; - } - - if let Some(with_dcproperties) = &self.with_dcproperties { - write!( - f, - " WITH DCPROPERTIES({})", - display_comma_separated(with_dcproperties) - )?; - } - - Ok(()) - } -} diff --git a/src/ast/dml.rs b/src/ast/dml.rs index e179f5d7..2932fafb 100644 --- a/src/ast/dml.rs +++ b/src/ast/dml.rs @@ -29,38 +29,16 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::display_utils::{indented_list, DisplayCommaSeparated, Indent, NewLine, SpaceOrNewline}; - pub use super::ddl::{ColumnDef, TableConstraint}; use super::{ - display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy, - CommentDef, CreateTableOptions, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, - HiveIOFormat, HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName, - OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, - Setting, SqliteOnConflict, StorageSerializationPolicy, TableObject, TableWithJoins, Tag, - WrappedCollection, + display_comma_separated, display_separated, ClusteredBy, CommentDef, Expr, FileFormat, + FromTable, HiveDistributionStyle, HiveFormat, HiveIOFormat, HiveRowFormat, Ident, + InsertAliases, MysqlInsertPriority, ObjectName, OnCommit, OnInsert, OneOrManyWithParens, + OrderByExpr, Query, RowAccessPolicy, SelectItem, SqlOption, SqliteOnConflict, TableEngine, + TableWithJoins, Tag, WrappedCollection, }; -/// Index column type. 
-#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct IndexColumn { - pub column: OrderByExpr, - pub operator_class: Option, -} - -impl Display for IndexColumn { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.column)?; - if let Some(operator_class) = &self.operator_class { - write!(f, " {operator_class}")?; - } - Ok(()) - } -} - /// CREATE INDEX statement. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -70,8 +48,8 @@ pub struct CreateIndex { pub name: Option, #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] pub table_name: ObjectName, - pub using: Option, - pub columns: Vec, + pub using: Option, + pub columns: Vec, pub unique: bool, pub concurrently: bool, pub if_not_exists: bool, @@ -139,7 +117,6 @@ pub struct CreateTable { pub if_not_exists: bool, pub transient: bool, pub volatile: bool, - pub iceberg: bool, /// Table name #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] pub name: ObjectName, @@ -148,17 +125,19 @@ pub struct CreateTable { pub constraints: Vec, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option, - pub table_options: CreateTableOptions, + pub table_properties: Vec, + pub with_options: Vec, pub file_format: Option, pub location: Option, pub query: Option>, pub without_rowid: bool, pub like: Option, pub clone: Option, - // For Hive dialect, the table comment is after the column definitions without `=`, - // so the `comment` field is optional and different than the comment field in the general options list. - // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) + pub engine: Option, pub comment: Option, + pub auto_increment_offset: Option, + pub default_charset: Option, + pub collation: Option, pub on_commit: Option, /// ClickHouse "ON CLUSTER" clause: /// @@ -175,17 +154,13 @@ pub struct CreateTable { pub partition_by: Option>, /// BigQuery: Table clustering column list. /// - /// Snowflake: Table clustering list which contains base column, expressions on base columns. - /// - pub cluster_by: Option>>, + pub cluster_by: Option>>, /// Hive: Table clustering column list. /// pub clustered_by: Option, - /// Postgres `INHERITs` clause, which contains the list of tables from which - /// the new table inherits. - /// - /// - pub inherits: Option>, + /// BigQuery: Table options list. + /// + pub options: Option>, /// SQLite "STRICT" clause. /// if the "STRICT" table-option keyword is added to the end, after the closing ")", /// then strict typing rules apply to that table. 
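On the `+` side of this hunk the MySQL-style table options live in dedicated fields (`engine`, `default_charset`, `collation`, `comment`, `auto_increment_offset`) instead of a single options collection. A hedged sketch of inspecting them after parsing, assuming that 0.52-era field layout; the dialect and SQL string are illustrative:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TABLE t (id INT) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT 'demo'";
    let stmt = Parser::parse_sql(&MySqlDialect {}, sql).expect("valid DDL").remove(0);
    if let Statement::CreateTable(create) = stmt {
        // Each option lands in its own field on this side of the diff.
        println!("engine  = {:?}", create.engine);
        println!("charset = {:?}", create.default_charset);
        println!("comment = {:?}", create.comment);
    }
}
```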
@@ -217,21 +192,6 @@ pub struct CreateTable { /// Snowflake "WITH TAG" clause /// pub with_tags: Option>, - /// Snowflake "EXTERNAL_VOLUME" clause for Iceberg tables - /// - pub external_volume: Option, - /// Snowflake "BASE_LOCATION" clause for Iceberg tables - /// - pub base_location: Option, - /// Snowflake "CATALOG" clause for Iceberg tables - /// - pub catalog: Option, - /// Snowflake "CATALOG_SYNC" clause for Iceberg tables - /// - pub catalog_sync: Option, - /// Snowflake "STORAGE_SERIALIZATION_POLICY" clause for Iceberg tables - /// - pub storage_serialization_policy: Option, } impl Display for CreateTable { @@ -245,7 +205,7 @@ impl Display for CreateTable { // `CREATE TABLE t (a INT) AS SELECT a from t2` write!( f, - "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}{iceberg}TABLE {if_not_exists}{name}", + "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}TABLE {if_not_exists}{name}", or_replace = if self.or_replace { "OR REPLACE " } else { "" }, external = if self.external { "EXTERNAL " } else { "" }, global = self.global @@ -261,32 +221,25 @@ impl Display for CreateTable { temporary = if self.temporary { "TEMPORARY " } else { "" }, transient = if self.transient { "TRANSIENT " } else { "" }, volatile = if self.volatile { "VOLATILE " } else { "" }, - // Only for Snowflake - iceberg = if self.iceberg { "ICEBERG " } else { "" }, name = self.name, )?; if let Some(on_cluster) = &self.on_cluster { - write!(f, " ON CLUSTER {on_cluster}")?; + write!(f, " ON CLUSTER {}", on_cluster)?; } if !self.columns.is_empty() || !self.constraints.is_empty() { - f.write_str(" (")?; - NewLine.fmt(f)?; - Indent(DisplayCommaSeparated(&self.columns)).fmt(f)?; + write!(f, " ({}", display_comma_separated(&self.columns))?; if !self.columns.is_empty() && !self.constraints.is_empty() { - f.write_str(",")?; - SpaceOrNewline.fmt(f)?; + write!(f, ", ")?; } - Indent(DisplayCommaSeparated(&self.constraints)).fmt(f)?; - NewLine.fmt(f)?; - f.write_str(")")?; + write!(f, "{})", display_comma_separated(&self.constraints))?; } else if self.query.is_none() && self.like.is_none() && self.clone.is_none() { // PostgreSQL allows `CREATE TABLE t ();`, but requires empty parens - f.write_str(" ()")?; + write!(f, " ()")?; } // Hive table comment should be after column definitions, please refer to: // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) - if let Some(comment) = &self.comment { + if let Some(CommentDef::AfterColumnDefsWithoutEq(comment)) = &self.comment { write!(f, " COMMENT '{comment}'")?; } @@ -379,22 +332,40 @@ impl Display for CreateTable { } write!(f, " LOCATION '{}'", self.location.as_ref().unwrap())?; } - - match &self.table_options { - options @ CreateTableOptions::With(_) - | options @ CreateTableOptions::Plain(_) - | options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?, - _ => (), + if !self.table_properties.is_empty() { + write!( + f, + " TBLPROPERTIES ({})", + display_comma_separated(&self.table_properties) + )?; + } + if !self.with_options.is_empty() { + write!(f, " WITH ({})", display_comma_separated(&self.with_options))?; + } + if let Some(engine) = &self.engine { + write!(f, " ENGINE={engine}")?; + } + if let Some(comment_def) = &self.comment { + match comment_def { + CommentDef::WithEq(comment) => { + write!(f, " COMMENT = '{comment}'")?; + } + CommentDef::WithoutEq(comment) => { + write!(f, " COMMENT '{comment}'")?; + } + // For CommentDef::AfterColumnDefsWithoutEq will be 
displayed after column definition + CommentDef::AfterColumnDefsWithoutEq(_) => (), + } } + if let Some(auto_increment_offset) = self.auto_increment_offset { + write!(f, " AUTO_INCREMENT {auto_increment_offset}")?; + } if let Some(primary_key) = &self.primary_key { - write!(f, " PRIMARY KEY {primary_key}")?; + write!(f, " PRIMARY KEY {}", primary_key)?; } if let Some(order_by) = &self.order_by { - write!(f, " ORDER BY {order_by}")?; - } - if let Some(inherits) = &self.inherits { - write!(f, " INHERITS ({})", display_comma_separated(inherits))?; + write!(f, " ORDER BY {}", order_by)?; } if let Some(partition_by) = self.partition_by.as_ref() { write!(f, " PARTITION BY {partition_by}")?; @@ -402,31 +373,12 @@ impl Display for CreateTable { if let Some(cluster_by) = self.cluster_by.as_ref() { write!(f, " CLUSTER BY {cluster_by}")?; } - if let options @ CreateTableOptions::Options(_) = &self.table_options { - write!(f, " {options}")?; - } - if let Some(external_volume) = self.external_volume.as_ref() { - write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?; - } - if let Some(catalog) = self.catalog.as_ref() { - write!(f, " CATALOG = '{catalog}'")?; - } - - if self.iceberg { - if let Some(base_location) = self.base_location.as_ref() { - write!(f, " BASE_LOCATION = '{base_location}'")?; - } - } - - if let Some(catalog_sync) = self.catalog_sync.as_ref() { - write!(f, " CATALOG_SYNC = '{catalog_sync}'")?; - } - - if let Some(storage_serialization_policy) = self.storage_serialization_policy.as_ref() { + if let Some(options) = self.options.as_ref() { write!( f, - " STORAGE_SERIALIZATION_POLICY = {storage_serialization_policy}" + " OPTIONS({})", + display_comma_separated(options.as_slice()) )?; } @@ -480,6 +432,13 @@ impl Display for CreateTable { write!(f, " WITH TAG ({})", display_comma_separated(tag.as_slice()))?; } + if let Some(default_charset) = &self.default_charset { + write!(f, " DEFAULT CHARSET={default_charset}")?; + } + if let Some(collation) = &self.collation { + write!(f, " COLLATE={collation}")?; + } + if self.on_commit.is_some() { let on_commit = match self.on_commit { Some(OnCommit::DeleteRows) => "ON COMMIT DELETE ROWS", @@ -511,7 +470,8 @@ pub struct Insert { /// INTO - optional keyword pub into: bool, /// TABLE - pub table: TableObject, + #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] + pub table_name: ObjectName, /// table_name as foo (for PostgreSQL) pub table_alias: Option, /// COLUMNS @@ -520,15 +480,12 @@ pub struct Insert { pub overwrite: bool, /// A SQL query that specifies what to insert pub source: Option>, - /// MySQL `INSERT INTO ... SET` - /// See: - pub assignments: Vec, /// partitioned insert (Hive) pub partitioned: Option>, /// Columns defined after PARTITION pub after_columns: Vec, /// whether the insert has the table keyword (Hive) - pub has_table_keyword: bool, + pub table: bool, pub on: Option, /// RETURNING pub returning: Option>, @@ -538,31 +495,18 @@ pub struct Insert { pub priority: Option, /// Only for mysql pub insert_alias: Option, - /// Settings used for ClickHouse. - /// - /// ClickHouse syntax: `INSERT INTO tbl SETTINGS format_template_resultset = '/some/path/resultset.format'` - /// - /// [ClickHouse `INSERT INTO`](https://clickhouse.com/docs/en/sql-reference/statements/insert-into) - pub settings: Option>, - /// Format for `INSERT` statement when not using standard SQL format. Can be e.g. `CSV`, - /// `JSON`, `JSONAsString`, `LineAsString` and more. 
- /// - /// ClickHouse syntax: `INSERT INTO tbl FORMAT JSONEachRow {"foo": 1, "bar": 2}, {"foo": 3}` - /// - /// [ClickHouse formats JSON insert](https://clickhouse.com/docs/en/interfaces/formats#json-inserting-data) - pub format_clause: Option, } impl Display for Insert { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let table_name = if let Some(alias) = &self.table_alias { - format!("{0} AS {alias}", self.table) + format!("{0} AS {alias}", self.table_name) } else { - self.table.to_string() + self.table_name.to_string() }; - if let Some(on_conflict) = self.or { - write!(f, "INSERT {on_conflict} INTO {table_name} ")?; + if let Some(action) = self.or { + write!(f, "INSERT OR {action} INTO {table_name} ")?; } else { write!( f, @@ -584,37 +528,26 @@ impl Display for Insert { ignore = if self.ignore { " IGNORE" } else { "" }, over = if self.overwrite { " OVERWRITE" } else { "" }, int = if self.into { " INTO" } else { "" }, - tbl = if self.has_table_keyword { " TABLE" } else { "" }, + tbl = if self.table { " TABLE" } else { "" }, )?; } if !self.columns.is_empty() { - write!(f, "({})", display_comma_separated(&self.columns))?; - SpaceOrNewline.fmt(f)?; + write!(f, "({}) ", display_comma_separated(&self.columns))?; } if let Some(ref parts) = self.partitioned { if !parts.is_empty() { - write!(f, "PARTITION ({})", display_comma_separated(parts))?; - SpaceOrNewline.fmt(f)?; + write!(f, "PARTITION ({}) ", display_comma_separated(parts))?; } } if !self.after_columns.is_empty() { - write!(f, "({})", display_comma_separated(&self.after_columns))?; - SpaceOrNewline.fmt(f)?; - } - - if let Some(settings) = &self.settings { - write!(f, "SETTINGS {}", display_comma_separated(settings))?; - SpaceOrNewline.fmt(f)?; + write!(f, "({}) ", display_comma_separated(&self.after_columns))?; } if let Some(source) = &self.source { - source.fmt(f)?; - } else if !self.assignments.is_empty() { - write!(f, "SET")?; - indented_list(f, &self.assignments)?; - } else if let Some(format_clause) = &self.format_clause { - format_clause.fmt(f)?; - } else if self.columns.is_empty() { + write!(f, "{source}")?; + } + + if self.source.is_none() && self.columns.is_empty() { write!(f, "DEFAULT VALUES")?; } @@ -633,9 +566,7 @@ impl Display for Insert { } if let Some(returning) = &self.returning { - SpaceOrNewline.fmt(f)?; - f.write_str("RETURNING")?; - indented_list(f, returning)?; + write!(f, " RETURNING {}", display_comma_separated(returning))?; } Ok(()) } @@ -664,45 +595,32 @@ pub struct Delete { impl Display for Delete { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("DELETE")?; + write!(f, "DELETE ")?; if !self.tables.is_empty() { - indented_list(f, &self.tables)?; + write!(f, "{} ", display_comma_separated(&self.tables))?; } match &self.from { FromTable::WithFromKeyword(from) => { - f.write_str(" FROM")?; - indented_list(f, from)?; + write!(f, "FROM {}", display_comma_separated(from))?; } FromTable::WithoutKeyword(from) => { - indented_list(f, from)?; + write!(f, "{}", display_comma_separated(from))?; } } if let Some(using) = &self.using { - SpaceOrNewline.fmt(f)?; - f.write_str("USING")?; - indented_list(f, using)?; + write!(f, " USING {}", display_comma_separated(using))?; } if let Some(selection) = &self.selection { - SpaceOrNewline.fmt(f)?; - f.write_str("WHERE")?; - SpaceOrNewline.fmt(f)?; - Indent(selection).fmt(f)?; + write!(f, " WHERE {selection}")?; } if let Some(returning) = &self.returning { - SpaceOrNewline.fmt(f)?; - f.write_str("RETURNING")?; - indented_list(f, returning)?; + 
write!(f, " RETURNING {}", display_comma_separated(returning))?; } if !self.order_by.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("ORDER BY")?; - indented_list(f, &self.order_by)?; + write!(f, " ORDER BY {}", display_comma_separated(&self.order_by))?; } if let Some(limit) = &self.limit { - SpaceOrNewline.fmt(f)?; - f.write_str("LIMIT")?; - SpaceOrNewline.fmt(f)?; - Indent(limit).fmt(f)?; + write!(f, " LIMIT {limit}")?; } Ok(()) } diff --git a/src/ast/helpers/attached_token.rs b/src/ast/helpers/attached_token.rs deleted file mode 100644 index 6b930b51..00000000 --- a/src/ast/helpers/attached_token.rs +++ /dev/null @@ -1,136 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd}; -use core::fmt::{self, Debug, Formatter}; -use core::hash::{Hash, Hasher}; - -use crate::tokenizer::TokenWithSpan; - -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -#[cfg(feature = "visitor")] -use sqlparser_derive::{Visit, VisitMut}; - -/// A wrapper over [`TokenWithSpan`]s that ignores the token and source -/// location in comparisons and hashing. -/// -/// This type is used when the token and location is not relevant for semantics, -/// but is still needed for accurate source location tracking, for example, in -/// the nodes in the [ast](crate::ast) module. -/// -/// Note: **All** `AttachedTokens` are equal. 
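A standalone stand-in (not the crate's type) for the wrapper being deleted here: the payload exists only for source tracking, so equality and hashing ignore it completely, mirroring the `PartialEq`/`Hash` impls further down in this file:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Stand-in for the removed wrapper: a string payload instead of a
/// `TokenWithSpan`, kept only for tracking and ignored in comparisons.
#[derive(Clone, Debug)]
struct Attached(String);

impl PartialEq for Attached {
    fn eq(&self, _: &Self) -> bool {
        true // every instance compares equal
    }
}
impl Eq for Attached {}

impl Hash for Attached {
    fn hash<H: Hasher>(&self, _state: &mut H) {
        // deliberately empty: the payload never affects a hash
    }
}

fn main() {
    let a = Attached("Comma at 1:10".into());
    let b = Attached("Period at 2:20".into());
    assert_eq!(a, b); // equal despite different payloads

    let mut h1 = DefaultHasher::new();
    let mut h2 = DefaultHasher::new();
    a.hash(&mut h1);
    b.hash(&mut h2);
    assert_eq!(h1.finish(), h2.finish()); // identical hashes, too
}
```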
-/// -/// # Examples -/// -/// Same token, different location are equal -/// ``` -/// # use sqlparser::ast::helpers::attached_token::AttachedToken; -/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation}; -/// // commas @ line 1, column 10 -/// let tok1 = TokenWithLocation::new( -/// Token::Comma, -/// Span::new(Location::new(1, 10), Location::new(1, 11)), -/// ); -/// // commas @ line 2, column 20 -/// let tok2 = TokenWithLocation::new( -/// Token::Comma, -/// Span::new(Location::new(2, 20), Location::new(2, 21)), -/// ); -/// -/// assert_ne!(tok1, tok2); // token with locations are *not* equal -/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are -/// ``` -/// -/// Different token, different location are equal 🤯 -/// -/// ``` -/// # use sqlparser::ast::helpers::attached_token::AttachedToken; -/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation}; -/// // commas @ line 1, column 10 -/// let tok1 = TokenWithLocation::new( -/// Token::Comma, -/// Span::new(Location::new(1, 10), Location::new(1, 11)), -/// ); -/// // period @ line 2, column 20 -/// let tok2 = TokenWithLocation::new( -/// Token::Period, -/// Span::new(Location::new(2, 10), Location::new(2, 21)), -/// ); -/// -/// assert_ne!(tok1, tok2); // token with locations are *not* equal -/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are -/// ``` -/// // period @ line 2, column 20 -#[derive(Clone)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct AttachedToken(pub TokenWithSpan); - -impl AttachedToken { - /// Return a new Empty AttachedToken - pub fn empty() -> Self { - AttachedToken(TokenWithSpan::new_eof()) - } -} - -// Conditional Implementations -impl Debug for AttachedToken { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -// Blanket Implementations -impl PartialEq for AttachedToken { - fn eq(&self, _: &Self) -> bool { - true - } -} - -impl Eq for AttachedToken {} - -impl PartialOrd for AttachedToken { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for AttachedToken { - fn cmp(&self, _: &Self) -> Ordering { - Ordering::Equal - } -} - -impl Hash for AttachedToken { - fn hash(&self, _state: &mut H) { - // Do nothing - } -} - -impl From for AttachedToken { - fn from(value: TokenWithSpan) -> Self { - AttachedToken(value) - } -} - -impl From for TokenWithSpan { - fn from(value: AttachedToken) -> Self { - value.0 - } -} diff --git a/src/ast/helpers/key_value_options.rs b/src/ast/helpers/key_value_options.rs deleted file mode 100644 index 796bfd5e..00000000 --- a/src/ast/helpers/key_value_options.rs +++ /dev/null @@ -1,89 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -//! Key-value options for SQL statements. -//! See [this page](https://docs.snowflake.com/en/sql-reference/commands-data-loading) for more details. - -#[cfg(not(feature = "std"))] -use alloc::string::String; -#[cfg(not(feature = "std"))] -use alloc::vec::Vec; -use core::fmt; -use core::fmt::Formatter; - -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -#[cfg(feature = "visitor")] -use sqlparser_derive::{Visit, VisitMut}; - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct KeyValueOptions { - pub options: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum KeyValueOptionType { - STRING, - BOOLEAN, - ENUM, - NUMBER, -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct KeyValueOption { - pub option_name: String, - pub option_type: KeyValueOptionType, - pub value: String, -} - -impl fmt::Display for KeyValueOptions { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if !self.options.is_empty() { - let mut first = false; - for option in &self.options { - if !first { - first = true; - } else { - f.write_str(" ")?; - } - write!(f, "{option}")?; - } - } - Ok(()) - } -} - -impl fmt::Display for KeyValueOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.option_type { - KeyValueOptionType::STRING => { - write!(f, "{}='{}'", self.option_name, self.value)?; - } - KeyValueOptionType::ENUM | KeyValueOptionType::BOOLEAN | KeyValueOptionType::NUMBER => { - write!(f, "{}={}", self.option_name, self.value)?; - } - } - Ok(()) - } -} diff --git a/src/ast/helpers/mod.rs b/src/ast/helpers/mod.rs index 55831220..d6924ab8 100644 --- a/src/ast/helpers/mod.rs +++ b/src/ast/helpers/mod.rs @@ -14,7 +14,5 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. -pub mod attached_token; -pub mod key_value_options; pub mod stmt_create_table; pub mod stmt_data_loading; diff --git a/src/ast/helpers/stmt_create_table.rs b/src/ast/helpers/stmt_create_table.rs index d66a869b..364969c4 100644 --- a/src/ast/helpers/stmt_create_table.rs +++ b/src/ast/helpers/stmt_create_table.rs @@ -26,12 +26,10 @@ use sqlparser_derive::{Visit, VisitMut}; use super::super::dml::CreateTable; use crate::ast::{ - ClusteredBy, ColumnDef, CommentDef, CreateTableOptions, Expr, FileFormat, - HiveDistributionStyle, HiveFormat, Ident, ObjectName, OnCommit, OneOrManyWithParens, Query, - RowAccessPolicy, Statement, StorageSerializationPolicy, TableConstraint, Tag, - WrappedCollection, + ClusteredBy, ColumnDef, CommentDef, Expr, FileFormat, HiveDistributionStyle, HiveFormat, Ident, + ObjectName, OnCommit, OneOrManyWithParens, Query, RowAccessPolicy, SqlOption, Statement, + TableConstraint, TableEngine, Tag, WrappedCollection, }; - use crate::parser::ParserError; /// Builder for create table statement variant ([1]). 
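As a complement to the builder's doc example in the following hunk, a minimal sketch of building a `Statement` and converting an existing one back into a builder via the `TryFrom` impl. It assumes the `+`-side tuple-struct `ObjectName` constructor; the table name is made up:

```rust
use std::convert::TryFrom;

use sqlparser::ast::helpers::stmt_create_table::CreateTableBuilder;
use sqlparser::ast::{Ident, ObjectName, Statement};

fn main() {
    // Build a statement programmatically.
    let stmt: Statement = CreateTableBuilder::new(ObjectName(vec![Ident::new("events")]))
        .if_not_exists(true)
        .build();
    println!("{}", stmt); // e.g. `CREATE TABLE IF NOT EXISTS events ()`

    // Convert an existing CREATE TABLE back into a builder to tweak it.
    let tweaked = CreateTableBuilder::try_from(stmt)
        .expect("only CREATE TABLE statements convert")
        .or_replace(true)
        .build();
    println!("{}", tweaked);
}
```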
@@ -44,11 +42,12 @@ use crate::parser::ParserError; /// ```rust /// use sqlparser::ast::helpers::stmt_create_table::CreateTableBuilder; /// use sqlparser::ast::{ColumnDef, DataType, Ident, ObjectName}; -/// let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")])) +/// let builder = CreateTableBuilder::new(ObjectName(vec![Ident::new("table_name")])) /// .if_not_exists(true) /// .columns(vec![ColumnDef { /// name: Ident::new("c1"), /// data_type: DataType::Int(None), +/// collation: None, /// options: vec![], /// }]); /// // You can access internal elements with ease @@ -72,27 +71,32 @@ pub struct CreateTableBuilder { pub if_not_exists: bool, pub transient: bool, pub volatile: bool, - pub iceberg: bool, pub name: ObjectName, pub columns: Vec, pub constraints: Vec, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option, + pub table_properties: Vec, + pub with_options: Vec, pub file_format: Option, pub location: Option, pub query: Option>, pub without_rowid: bool, pub like: Option, pub clone: Option, + pub engine: Option, pub comment: Option, + pub auto_increment_offset: Option, + pub default_charset: Option, + pub collation: Option, pub on_commit: Option, pub on_cluster: Option, pub primary_key: Option>, pub order_by: Option>, pub partition_by: Option>, - pub cluster_by: Option>>, + pub cluster_by: Option>>, pub clustered_by: Option, - pub inherits: Option>, + pub options: Option>, pub strict: bool, pub copy_grants: bool, pub enable_schema_evolution: Option, @@ -103,12 +107,6 @@ pub struct CreateTableBuilder { pub with_aggregation_policy: Option, pub with_row_access_policy: Option, pub with_tags: Option>, - pub base_location: Option, - pub external_volume: Option, - pub catalog: Option, - pub catalog_sync: Option, - pub storage_serialization_policy: Option, - pub table_options: CreateTableOptions, } impl CreateTableBuilder { @@ -121,19 +119,24 @@ impl CreateTableBuilder { if_not_exists: false, transient: false, volatile: false, - iceberg: false, name, columns: vec![], constraints: vec![], hive_distribution: HiveDistributionStyle::NONE, hive_formats: None, + table_properties: vec![], + with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, + engine: None, comment: None, + auto_increment_offset: None, + default_charset: None, + collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -141,7 +144,7 @@ impl CreateTableBuilder { partition_by: None, cluster_by: None, clustered_by: None, - inherits: None, + options: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -152,12 +155,6 @@ impl CreateTableBuilder { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, - base_location: None, - external_volume: None, - catalog: None, - catalog_sync: None, - storage_serialization_policy: None, - table_options: CreateTableOptions::None, } } pub fn or_replace(mut self, or_replace: bool) -> Self { @@ -195,11 +192,6 @@ impl CreateTableBuilder { self } - pub fn iceberg(mut self, iceberg: bool) -> Self { - self.iceberg = iceberg; - self - } - pub fn columns(mut self, columns: Vec) -> Self { self.columns = columns; self @@ -220,6 +212,15 @@ impl CreateTableBuilder { self } + pub fn table_properties(mut self, table_properties: Vec) -> Self { + self.table_properties = table_properties; + self + } + + pub fn with_options(mut self, with_options: Vec) -> Self { + self.with_options = with_options; + self + } pub fn file_format(mut self, file_format: 
Option) -> Self { self.file_format = file_format; self @@ -249,11 +250,31 @@ impl CreateTableBuilder { self } - pub fn comment_after_column_def(mut self, comment: Option) -> Self { + pub fn engine(mut self, engine: Option) -> Self { + self.engine = engine; + self + } + + pub fn comment(mut self, comment: Option) -> Self { self.comment = comment; self } + pub fn auto_increment_offset(mut self, offset: Option) -> Self { + self.auto_increment_offset = offset; + self + } + + pub fn default_charset(mut self, default_charset: Option) -> Self { + self.default_charset = default_charset; + self + } + + pub fn collation(mut self, collation: Option) -> Self { + self.collation = collation; + self + } + pub fn on_commit(mut self, on_commit: Option) -> Self { self.on_commit = on_commit; self @@ -279,7 +300,7 @@ impl CreateTableBuilder { self } - pub fn cluster_by(mut self, cluster_by: Option>>) -> Self { + pub fn cluster_by(mut self, cluster_by: Option>>) -> Self { self.cluster_by = cluster_by; self } @@ -289,8 +310,8 @@ impl CreateTableBuilder { self } - pub fn inherits(mut self, inherits: Option>) -> Self { - self.inherits = inherits; + pub fn options(mut self, options: Option>) -> Self { + self.options = options; self } @@ -350,39 +371,6 @@ impl CreateTableBuilder { self } - pub fn base_location(mut self, base_location: Option) -> Self { - self.base_location = base_location; - self - } - - pub fn external_volume(mut self, external_volume: Option) -> Self { - self.external_volume = external_volume; - self - } - - pub fn catalog(mut self, catalog: Option) -> Self { - self.catalog = catalog; - self - } - - pub fn catalog_sync(mut self, catalog_sync: Option) -> Self { - self.catalog_sync = catalog_sync; - self - } - - pub fn storage_serialization_policy( - mut self, - storage_serialization_policy: Option, - ) -> Self { - self.storage_serialization_policy = storage_serialization_policy; - self - } - - pub fn table_options(mut self, table_options: CreateTableOptions) -> Self { - self.table_options = table_options; - self - } - pub fn build(self) -> Statement { Statement::CreateTable(CreateTable { or_replace: self.or_replace, @@ -392,19 +380,24 @@ impl CreateTableBuilder { if_not_exists: self.if_not_exists, transient: self.transient, volatile: self.volatile, - iceberg: self.iceberg, name: self.name, columns: self.columns, constraints: self.constraints, hive_distribution: self.hive_distribution, hive_formats: self.hive_formats, + table_properties: self.table_properties, + with_options: self.with_options, file_format: self.file_format, location: self.location, query: self.query, without_rowid: self.without_rowid, like: self.like, clone: self.clone, + engine: self.engine, comment: self.comment, + auto_increment_offset: self.auto_increment_offset, + default_charset: self.default_charset, + collation: self.collation, on_commit: self.on_commit, on_cluster: self.on_cluster, primary_key: self.primary_key, @@ -412,7 +405,7 @@ impl CreateTableBuilder { partition_by: self.partition_by, cluster_by: self.cluster_by, clustered_by: self.clustered_by, - inherits: self.inherits, + options: self.options, strict: self.strict, copy_grants: self.copy_grants, enable_schema_evolution: self.enable_schema_evolution, @@ -423,12 +416,6 @@ impl CreateTableBuilder { with_aggregation_policy: self.with_aggregation_policy, with_row_access_policy: self.with_row_access_policy, with_tags: self.with_tags, - base_location: self.base_location, - external_volume: self.external_volume, - catalog: self.catalog, - catalog_sync: self.catalog_sync, - 
storage_serialization_policy: self.storage_serialization_policy, - table_options: self.table_options, }) } } @@ -448,19 +435,24 @@ impl TryFrom for CreateTableBuilder { if_not_exists, transient, volatile, - iceberg, name, columns, constraints, hive_distribution, hive_formats, + table_properties, + with_options, file_format, location, query, without_rowid, like, clone, + engine, comment, + auto_increment_offset, + default_charset, + collation, on_commit, on_cluster, primary_key, @@ -468,7 +460,7 @@ impl TryFrom for CreateTableBuilder { partition_by, cluster_by, clustered_by, - inherits, + options, strict, copy_grants, enable_schema_evolution, @@ -479,12 +471,6 @@ impl TryFrom for CreateTableBuilder { with_aggregation_policy, with_row_access_policy, with_tags, - base_location, - external_volume, - catalog, - catalog_sync, - storage_serialization_policy, - table_options, }) => Ok(Self { or_replace, temporary, @@ -497,13 +483,19 @@ impl TryFrom for CreateTableBuilder { constraints, hive_distribution, hive_formats, + table_properties, + with_options, file_format, location, query, without_rowid, like, clone, + engine, comment, + auto_increment_offset, + default_charset, + collation, on_commit, on_cluster, primary_key, @@ -511,9 +503,8 @@ impl TryFrom for CreateTableBuilder { partition_by, cluster_by, clustered_by, - inherits, + options, strict, - iceberg, copy_grants, enable_schema_evolution, change_tracking, @@ -524,12 +515,6 @@ impl TryFrom for CreateTableBuilder { with_row_access_policy, with_tags, volatile, - base_location, - external_volume, - catalog, - catalog_sync, - storage_serialization_policy, - table_options, }), _ => Err(ParserError::ParserError(format!( "Expected create table statement, but received: {stmt}" @@ -542,9 +527,8 @@ impl TryFrom for CreateTableBuilder { #[derive(Default)] pub(crate) struct CreateTableConfiguration { pub partition_by: Option>, - pub cluster_by: Option>>, - pub inherits: Option>, - pub table_options: CreateTableOptions, + pub cluster_by: Option>>, + pub options: Option>, } #[cfg(test)] @@ -555,7 +539,7 @@ mod tests { #[test] pub fn test_from_valid_statement() { - let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")])); + let builder = CreateTableBuilder::new(ObjectName(vec![Ident::new("table_name")])); let stmt = builder.clone().build(); @@ -564,11 +548,7 @@ mod tests { #[test] pub fn test_from_invalid_statement() { - let stmt = Statement::Commit { - chain: false, - end: false, - modifier: None, - }; + let stmt = Statement::Commit { chain: false }; assert_eq!( CreateTableBuilder::try_from(stmt).unwrap_err(), diff --git a/src/ast/helpers/stmt_data_loading.rs b/src/ast/helpers/stmt_data_loading.rs index 92a72727..cda6c6ea 100644 --- a/src/ast/helpers/stmt_data_loading.rs +++ b/src/ast/helpers/stmt_data_loading.rs @@ -21,13 +21,15 @@ #[cfg(not(feature = "std"))] use alloc::string::String; +#[cfg(not(feature = "std"))] +use alloc::vec::Vec; use core::fmt; +use core::fmt::Formatter; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; -use crate::ast::helpers::key_value_options::KeyValueOptions; -use crate::ast::{Ident, ObjectName, SelectItem}; +use crate::ast::Ident; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; @@ -36,29 +38,35 @@ use sqlparser_derive::{Visit, VisitMut}; #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct StageParamsObject { pub url: Option, - pub encryption: KeyValueOptions, + pub encryption: DataLoadingOptions, pub endpoint: Option, pub storage_integration: 
Option, - pub credentials: KeyValueOptions, + pub credentials: DataLoadingOptions, } -/// This enum enables support for both standard SQL select item expressions -/// and Snowflake-specific ones for data loading. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum StageLoadSelectItemKind { - SelectItem(SelectItem), - StageLoadSelectItem(StageLoadSelectItem), +pub struct DataLoadingOptions { + pub options: Vec, } -impl fmt::Display for StageLoadSelectItemKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self { - StageLoadSelectItemKind::SelectItem(item) => write!(f, "{item}"), - StageLoadSelectItemKind::StageLoadSelectItem(item) => write!(f, "{item}"), - } - } +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum DataLoadingOptionType { + STRING, + BOOLEAN, + ENUM, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct DataLoadingOption { + pub option_name: String, + pub option_type: DataLoadingOptionType, + pub value: String, } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -97,6 +105,42 @@ impl fmt::Display for StageParamsObject { } } +impl fmt::Display for DataLoadingOptions { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if !self.options.is_empty() { + let mut first = false; + for option in &self.options { + if !first { + first = true; + } else { + f.write_str(" ")?; + } + write!(f, "{}", option)?; + } + } + Ok(()) + } +} + +impl fmt::Display for DataLoadingOption { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.option_type { + DataLoadingOptionType::STRING => { + write!(f, "{}='{}'", self.option_name, self.value)?; + } + DataLoadingOptionType::ENUM => { + // single quote is omitted + write!(f, "{}={}", self.option_name, self.value)?; + } + DataLoadingOptionType::BOOLEAN => { + // single quote is omitted + write!(f, "{}={}", self.option_name, self.value)?; + } + } + Ok(()) + } +} + impl fmt::Display for StageLoadSelectItem { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.alias.is_some() { @@ -112,22 +156,3 @@ impl fmt::Display for StageLoadSelectItem { Ok(()) } } - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct FileStagingCommand { - #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] - pub stage: ObjectName, - pub pattern: Option, -} - -impl fmt::Display for FileStagingCommand { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.stage)?; - if let Some(pattern) = self.pattern.as_ref() { - write!(f, " PATTERN='{pattern}'")?; - } - Ok(()) - } -} diff --git a/src/ast/mod.rs b/src/ast/mod.rs index 75e88f8a..a24739a6 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -23,17 +23,9 @@ use alloc::{ string::{String, ToString}, vec::Vec, }; -use helpers::{ - attached_token::AttachedToken, - stmt_data_loading::{FileStagingCommand, StageLoadSelectItemKind}, -}; -use core::cmp::Ordering; +use core::fmt::{self, Display}; use core::ops::Deref; -use core::{ - fmt::{self, Display}, - hash, -}; 
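The Display impls above single-quote `STRING` option values but print `ENUM` and `BOOLEAN` values bare. A small sketch using the `+`-side types; the option names and values are illustrative:

```rust
use sqlparser::ast::helpers::stmt_data_loading::{DataLoadingOption, DataLoadingOptionType};

fn main() {
    let quoted = DataLoadingOption {
        option_name: "FORMAT_NAME".to_string(),
        option_type: DataLoadingOptionType::STRING,
        value: "my_csv_format".to_string(),
    };
    let bare = DataLoadingOption {
        option_name: "PURGE".to_string(),
        option_type: DataLoadingOptionType::BOOLEAN,
        value: "TRUE".to_string(),
    };
    // STRING values are single-quoted; ENUM and BOOLEAN values are not.
    assert_eq!(quoted.to_string(), "FORMAT_NAME='my_csv_format'");
    assert_eq!(bare.to_string(), "PURGE=TRUE");
}
```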
#[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -41,56 +33,35 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::{ - display_utils::{indented_list, SpaceOrNewline}, - tokenizer::{Span, Token}, -}; -use crate::{ - display_utils::{Indent, NewLine}, - keywords::Keyword, -}; - pub use self::data_type::{ - ArrayElemTypeDef, BinaryLength, CharLengthUnits, CharacterLength, DataType, EnumMember, - ExactNumberInfo, StructBracketKind, TimezoneInfo, -}; -pub use self::dcl::{ - AlterRoleOperation, ResetConfig, RoleOption, SecondaryRoles, SetConfigValue, Use, + ArrayElemTypeDef, CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, + StructBracketKind, TimezoneInfo, }; +pub use self::dcl::{AlterRoleOperation, ResetConfig, RoleOption, SetConfigValue, Use}; pub use self::ddl::{ - AlterColumnOperation, AlterConnectorOwner, AlterIndexOperation, AlterPolicyOperation, - AlterTableAlgorithm, AlterTableLock, AlterTableOperation, AlterType, AlterTypeAddValue, - AlterTypeAddValuePosition, AlterTypeOperation, AlterTypeRename, AlterTypeRenameValue, - ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnOptions, ColumnPolicy, - ColumnPolicyProperty, ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction, - Deduplicate, DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode, - IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, - IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner, - Partition, ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint, - TagsColumnOption, UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, - ViewColumnDef, + AlterColumnOperation, AlterIndexOperation, AlterPolicyOperation, AlterTableOperation, + ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnPolicy, ColumnPolicyProperty, + ConstraintCharacteristics, Deduplicate, DeferrableInitial, GeneratedAs, + GeneratedExpressionMode, IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, + IdentityPropertyKind, IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, Owner, + Partition, ProcedureParam, ReferentialAction, TableConstraint, TagsColumnOption, + UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, ViewColumnDef, }; -pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert}; +pub use self::dml::{CreateIndex, CreateTable, Delete, Insert}; pub use self::operator::{BinaryOperator, UnaryOperator}; pub use self::query::{ AfterMatchSkip, ConnectBy, Cte, CteAsMaterialized, Distinct, EmptyMatchesMode, - ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, ExprWithAliasAndOrderBy, Fetch, ForClause, - ForJson, ForXml, FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias, - IlikeSelectItem, InputFormatClause, Interpolate, InterpolateExpr, Join, JoinConstraint, - JoinOperator, JsonTableColumn, JsonTableColumnErrorHandling, JsonTableNamedColumn, - JsonTableNestedColumn, LateralView, LimitClause, LockClause, LockType, MatchRecognizePattern, - MatchRecognizeSymbol, Measure, NamedWindowDefinition, NamedWindowExpr, NonBlock, Offset, - OffsetRows, OpenJsonTableColumn, OrderBy, OrderByExpr, OrderByKind, OrderByOptions, - PipeOperator, PivotValueSource, ProjectionSelect, Query, RenameSelectItem, - RepetitionQuantifier, ReplaceSelectElement, ReplaceSelectItem, RowsPerMatch, Select, - SelectFlavor, SelectInto, SelectItem, 
SelectItemQualifiedWildcardKind, SetExpr, SetOperator, - SetQuantifier, Setting, SymbolDefinition, Table, TableAlias, TableAliasColumnDef, TableFactor, - TableFunctionArgs, TableIndexHintForClause, TableIndexHintType, TableIndexHints, - TableIndexType, TableSample, TableSampleBucket, TableSampleKind, TableSampleMethod, - TableSampleModifier, TableSampleQuantity, TableSampleSeed, TableSampleSeedModifier, - TableSampleUnit, TableVersion, TableWithJoins, Top, TopQuantity, UpdateTableFromKind, - ValueTableMode, Values, WildcardAdditionalOptions, With, WithFill, XmlNamespaceDefinition, - XmlPassingArgument, XmlPassingClause, XmlTableColumn, XmlTableColumnOption, + ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, Fetch, ForClause, ForJson, ForXml, + FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias, IlikeSelectItem, Interpolate, + InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonTableColumn, + JsonTableColumnErrorHandling, JsonTableNamedColumn, JsonTableNestedColumn, LateralView, + LockClause, LockType, MatchRecognizePattern, MatchRecognizeSymbol, Measure, + NamedWindowDefinition, NamedWindowExpr, NonBlock, Offset, OffsetRows, OrderBy, OrderByExpr, + PivotValueSource, ProjectionSelect, Query, RenameSelectItem, RepetitionQuantifier, + ReplaceSelectElement, ReplaceSelectItem, RowsPerMatch, Select, SelectInto, SelectItem, SetExpr, + SetOperator, SetQuantifier, Setting, SymbolDefinition, Table, TableAlias, TableFactor, + TableFunctionArgs, TableVersion, TableWithJoins, Top, TopQuantity, ValueTableMode, Values, + WildcardAdditionalOptions, With, WithFill, }; pub use self::trigger::{ @@ -100,17 +71,15 @@ pub use self::trigger::{ pub use self::value::{ escape_double_quote_string, escape_quoted_string, DateTimeField, DollarQuotedString, - NormalizationForm, TrimWhereField, Value, ValueWithSpan, + TrimWhereField, Value, }; -use crate::ast::helpers::key_value_options::KeyValueOptions; -use crate::ast::helpers::stmt_data_loading::StageParamsObject; - +use crate::ast::helpers::stmt_data_loading::{ + DataLoadingOptions, StageLoadSelectItem, StageParamsObject, +}; #[cfg(feature = "visitor")] pub use visitor::*; -pub use self::data_type::GeometricTypeKind; - mod data_type; mod dcl; mod ddl; @@ -118,9 +87,6 @@ mod dml; pub mod helpers; mod operator; mod query; -mod spans; -pub use spans::Spanned; - mod trigger; mod value; @@ -135,16 +101,16 @@ where sep: &'static str, } -impl fmt::Display for DisplaySeparated<'_, T> +impl<'a, T> fmt::Display for DisplaySeparated<'a, T> where T: fmt::Display, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut delim = ""; for t in self.slice { - f.write_str(delim)?; + write!(f, "{delim}")?; delim = self.sep; - t.fmt(f)?; + write!(f, "{t}")?; } Ok(()) } @@ -164,17 +130,8 @@ where DisplaySeparated { slice, sep: ", " } } -/// Writes the given statements to the formatter, each ending with -/// a semicolon and space separated. -fn format_statement_list(f: &mut fmt::Formatter, statements: &[Statement]) -> fmt::Result { - write!(f, "{}", display_separated(statements, "; "))?; - // We manually insert semicolon for the last statement, - // since display_separated doesn't handle that case. - write!(f, ";") -} - /// An identifier, decomposed into its value or character data and the quote style. 
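The removed `format_statement_list` helper joins statements with `"; "` and appends the final `;` that `display_separated` alone would drop. A standalone sketch of that joining rule, operating on pre-rendered strings rather than `Statement` values:

```rust
/// Join pre-rendered statements the way the removed helper does: separate
/// with "; " and close the list with a trailing semicolon.
fn format_statement_list(statements: &[&str]) -> String {
    format!("{};", statements.join("; "))
}

fn main() {
    assert_eq!(
        format_statement_list(&["SELECT 1", "SELECT 2"]),
        "SELECT 1; SELECT 2;"
    );
}
```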
-#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Ident { @@ -183,70 +140,10 @@ pub struct Ident { /// The starting quote if any. Valid quote characters are the single quote, /// double quote, backtick, and opening square bracket. pub quote_style: Option, - /// The span of the identifier in the original SQL string. - pub span: Span, -} - -impl PartialEq for Ident { - fn eq(&self, other: &Self) -> bool { - let Ident { - value, - quote_style, - // exhaustiveness check; we ignore spans in comparisons - span: _, - } = self; - - value == &other.value && quote_style == &other.quote_style - } -} - -impl core::hash::Hash for Ident { - fn hash(&self, state: &mut H) { - let Ident { - value, - quote_style, - // exhaustiveness check; we ignore spans in hashes - span: _, - } = self; - - value.hash(state); - quote_style.hash(state); - } -} - -impl Eq for Ident {} - -impl PartialOrd for Ident { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Ident { - fn cmp(&self, other: &Self) -> Ordering { - let Ident { - value, - quote_style, - // exhaustiveness check; we ignore spans in ordering - span: _, - } = self; - - let Ident { - value: other_value, - quote_style: other_quote_style, - // exhaustiveness check; we ignore spans in ordering - span: _, - } = other; - - // First compare by value, then by quote_style - value - .cmp(other_value) - .then_with(|| quote_style.cmp(other_quote_style)) - } } impl Ident { - /// Create a new identifier with the given value and no quotes and an empty span. + /// Create a new identifier with the given value and no quotes. 
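Both constructors below exist on either side of this diff (only the span argument differs); a quick sketch of how the quote style flows through `Display`:

```rust
use sqlparser::ast::Ident;

fn main() {
    let plain = Ident::new("users");
    let quoted = Ident::with_quote('"', "User Table");
    // Display re-adds the quote style, if any.
    assert_eq!(plain.to_string(), "users");
    assert_eq!(quoted.to_string(), "\"User Table\"");
}
```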
pub fn new(value: S) -> Self where S: Into, @@ -254,7 +151,6 @@ impl Ident { Ident { value: value.into(), quote_style: None, - span: Span::empty(), } } @@ -268,30 +164,6 @@ impl Ident { Ident { value: value.into(), quote_style: Some(quote), - span: Span::empty(), - } - } - - pub fn with_span(span: Span, value: S) -> Self - where - S: Into, - { - Ident { - value: value.into(), - quote_style: None, - span, - } - } - - pub fn with_quote_and_span(quote: char, span: Span, value: S) -> Self - where - S: Into, - { - assert!(quote == '\'' || quote == '"' || quote == '`' || quote == '['); - Ident { - value: value.into(), - quote_style: Some(quote), - span, } } } @@ -301,7 +173,6 @@ impl From<&str> for Ident { Ident { value: value.to_string(), quote_style: None, - span: Span::empty(), } } } @@ -324,13 +195,7 @@ impl fmt::Display for Ident { #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ObjectName(pub Vec); - -impl From> for ObjectName { - fn from(idents: Vec) -> Self { - ObjectName(idents.into_iter().map(ObjectNamePart::Identifier).collect()) - } -} +pub struct ObjectName(pub Vec); impl fmt::Display for ObjectName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -338,52 +203,6 @@ impl fmt::Display for ObjectName { } } -/// A single part of an ObjectName -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ObjectNamePart { - Identifier(Ident), - Function(ObjectNamePartFunction), -} - -impl ObjectNamePart { - pub fn as_ident(&self) -> Option<&Ident> { - match self { - ObjectNamePart::Identifier(ident) => Some(ident), - ObjectNamePart::Function(_) => None, - } - } -} - -impl fmt::Display for ObjectNamePart { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ObjectNamePart::Identifier(ident) => write!(f, "{ident}"), - ObjectNamePart::Function(func) => write!(f, "{func}"), - } - } -} - -/// An object name part that consists of a function that dynamically -/// constructs identifiers. -/// -/// - [Snowflake](https://docs.snowflake.com/en/sql-reference/identifier-literal) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ObjectNamePartFunction { - pub name: Ident, - pub args: Vec, -} - -impl fmt::Display for ObjectNamePartFunction { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}(", self.name)?; - write!(f, "{})", display_comma_separated(&self.args)) - } -} - /// Represents an Array Expression, either /// `ARRAY[..]`, or `[..]` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -474,36 +293,28 @@ impl fmt::Display for Interval { /// A field definition within a struct /// -/// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type +/// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct StructField { pub field_name: Option, pub field_type: DataType, - /// Struct field options. 
- /// See [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#column_name_and_column_schema) - pub options: Option>, } impl fmt::Display for StructField { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(name) = &self.field_name { - write!(f, "{name} {}", self.field_type)?; + write!(f, "{name} {}", self.field_type) } else { - write!(f, "{}", self.field_type)?; - } - if let Some(options) = &self.options { - write!(f, " OPTIONS({})", display_separated(options, ", ")) - } else { - Ok(()) + write!(f, "{}", self.field_type) } } } /// A field definition within a union /// -/// [DuckDB]: https://duckdb.org/docs/sql/data_types/union.html +/// [duckdb]: https://duckdb.org/docs/sql/data_types/union.html #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -520,7 +331,7 @@ impl fmt::Display for UnionField { /// A dictionary field within a dictionary. /// -/// [DuckDB]: https://duckdb.org/docs/sql/data_types/struct#creating-structs +/// [duckdb]: https://duckdb.org/docs/sql/data_types/struct#creating-structs #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -551,7 +362,7 @@ impl Display for Map { /// A map field within a map. /// -/// [DuckDB]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps +/// [duckdb]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -576,6 +387,40 @@ pub enum CastFormat { ValueAtTimeZone(Value, Value), } +/// Represents the syntax/style used in a map access. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum MapAccessSyntax { + /// Access using bracket notation. `mymap[mykey]` + Bracket, + /// Access using period notation. `mymap.mykey` + Period, +} + +/// Expression used to access a value in a nested structure. +/// +/// Example: `SAFE_OFFSET(0)` in +/// ```sql +/// SELECT mymap[SAFE_OFFSET(0)]; +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct MapAccessKey { + pub key: Expr, + pub syntax: MapAccessSyntax, +} + +impl fmt::Display for MapAccessKey { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.syntax { + MapAccessSyntax::Bracket => write!(f, "[{}]", self.key), + MapAccessSyntax::Period => write!(f, ".{}", self.key), + } + } +} + /// An element of a JSON path. 
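A sketch of the `+`-side map-access representation: bracket-style keys render as `[...]` and period-style keys as `.key`. It assumes the 0.52-era AST where `Expr::Value` wraps a plain `Value` and `MapAccessKey` is re-exported from `sqlparser::ast`:

```rust
use sqlparser::ast::{Expr, Ident, MapAccessKey, MapAccessSyntax, Value};

fn main() {
    // `mymap['color']` uses one bracket-style key...
    let bracket = MapAccessKey {
        key: Expr::Value(Value::SingleQuotedString("color".to_string())),
        syntax: MapAccessSyntax::Bracket,
    };
    // ...while `mymap.color` uses a period-style key.
    let period = MapAccessKey {
        key: Expr::Identifier(Ident::new("color")),
        syntax: MapAccessSyntax::Period,
    };
    assert_eq!(bracket.to_string(), "['color']");
    assert_eq!(period.to_string(), ".color");
}
```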
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -683,44 +528,11 @@ pub enum CeilFloorKind { Scale(Value), } -/// A WHEN clause in a CASE expression containing both -/// the condition and its corresponding result -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CaseWhen { - pub condition: Expr, - pub result: Expr, -} - -impl fmt::Display for CaseWhen { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("WHEN ")?; - self.condition.fmt(f)?; - f.write_str(" THEN")?; - SpaceOrNewline.fmt(f)?; - Indent(&self.result).fmt(f)?; - Ok(()) - } -} - /// An SQL expression of any type. /// -/// # Semantics / Type Checking -/// /// The parser does not distinguish between expressions of different types -/// (e.g. boolean vs string). The caller is responsible for detecting and -/// validating types as necessary (for example `WHERE 1` vs `SELECT 1=1`) -/// See the [README.md] for more details. -/// -/// [README.md]: https://github.com/apache/datafusion-sqlparser-rs/blob/main/README.md#syntax-vs-semantics -/// -/// # Equality and Hashing Does not Include Source Locations -/// -/// The `Expr` type implements `PartialEq` and `Eq` based on the semantic value -/// of the expression (not bitwise comparison). This means that `Expr` instances -/// that are semantically equivalent but have different spans (locations in the -/// source tree) will compare as equal. +/// (e.g. boolean vs string), so the caller must handle expressions of +/// inappropriate type, like `WHERE 1` or `SELECT 1=1`, as necessary. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr( @@ -733,28 +545,6 @@ pub enum Expr { Identifier(Ident), /// Multi-part identifier, e.g. `table_alias.column` or `schema.table.col` CompoundIdentifier(Vec), - /// Multi-part expression access. - /// - /// This structure represents an access chain in structured / nested types - /// such as maps, arrays, and lists: - /// - Array - /// - A 1-dim array `a[1]` will be represented like: - /// `CompoundFieldAccess(Ident('a'), vec![Subscript(1)]` - /// - A 2-dim array `a[1][2]` will be represented like: - /// `CompoundFieldAccess(Ident('a'), vec![Subscript(1), Subscript(2)]` - /// - Map or Struct (Bracket-style) - /// - A map `a['field1']` will be represented like: - /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field')]` - /// - A 2-dim map `a['field1']['field2']` will be represented like: - /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field1'), Subscript('field2')]` - /// - Struct (Dot-style) (only effect when the chain contains both subscript and expr) - /// - A struct access `a[field1].field2` will be represented like: - /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field1'), Ident('field2')]` - /// - If a struct access likes `a.field1.field2`, it will be represented by CompoundIdentifier([a, field1, field2]) - CompoundFieldAccess { - root: Box, - access_chain: Vec, - }, /// Access data nested in a value containing semi-structured data, such as /// the `VARIANT` type on Snowflake. for example `src:customer[0].name`. /// @@ -766,6 +556,11 @@ pub enum Expr { /// The path to the data to extract. 
path: JsonPath, }, + /// CompositeAccess (postgres) eg: SELECT (information_schema._pg_expandarray(array['i','i'])).n + CompositeAccess { + expr: Box, + key: Ident, + }, /// `IS FALSE` operator IsFalse(Box), /// `IS NOT FALSE` operator @@ -786,12 +581,6 @@ pub enum Expr { IsDistinctFrom(Box, Box), /// `IS NOT DISTINCT FROM` operator IsNotDistinctFrom(Box, Box), - /// ` IS [ NOT ] [ form ] NORMALIZED` - IsNormalized { - expr: Box, - form: Option, - negated: bool, - }, /// `[ NOT ] IN (val1, val2, ...)` InList { expr: Box, @@ -831,7 +620,7 @@ pub enum Expr { any: bool, expr: Box, pattern: Box, - escape_char: Option, + escape_char: Option, }, /// `ILIKE` (case-insensitive `LIKE`) ILike { @@ -841,14 +630,14 @@ pub enum Expr { any: bool, expr: Box, pattern: Box, - escape_char: Option, + escape_char: Option, }, /// SIMILAR TO regex SimilarTo { negated: bool, expr: Box, pattern: Box, - escape_char: Option, + escape_char: Option, }, /// MySQL: RLIKE regex or REGEXP regex RLike { @@ -902,9 +691,8 @@ pub enum Expr { kind: CastKind, expr: Box, data_type: DataType, - /// Optional CAST(string_expression AS type FORMAT format_string_expression) as used by [BigQuery] - /// - /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#formatting_syntax + // Optional CAST(string_expression AS type FORMAT format_string_expression) as used by BigQuery + // https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#formatting_syntax format: Option, }, /// AT a timestamp to a different timezone e.g. `FROM_UNIXTIME(0) AT TIME ZONE 'UTC-06:00'` @@ -967,10 +755,6 @@ pub enum Expr { /// true if the expression is represented using the `SUBSTRING(expr, start, len)` syntax /// This flag is used for formatting. special: bool, - - /// true if the expression is represented using the `SUBSTR` shorthand - /// This flag is used for formatting. - shorthand: bool, }, /// ```sql /// TRIM([BOTH | LEADING | TRAILING] [ FROM] ) @@ -1001,24 +785,26 @@ pub enum Expr { /// Nested expression e.g. `(foo > bar)` or `(1)` Nested(Box), /// A literal value, such as string, number, date or NULL - Value(ValueWithSpan), - /// Prefixed expression, e.g. introducer strings, projection prefix + Value(Value), /// - /// - Prefixed { - prefix: Ident, - /// The value of the constant. - /// Hint: you can unwrap the string value using `value.into_string()`. - value: Box, + IntroducedString { + introducer: String, + value: Value, }, /// A constant of form ` 'value'`. /// This can represent ANSI SQL `DATE`, `TIME`, and `TIMESTAMP` literals (such as `DATE '2020-01-01'`), /// as well as constants of other types (a non-standard PostgreSQL extension). TypedString { data_type: DataType, - /// The value of the constant. - /// Hint: you can unwrap the string value using `value.into_string()`. - value: ValueWithSpan, + value: String, + }, + /// Access a map-like object by field (e.g. `column['field']` or `column[4]` + /// Note that depending on the dialect, struct like accesses may be + /// parsed as [`Subscript`](Self::Subscript) or [`MapAccess`](Self::MapAccess) + /// + MapAccess { + column: Box, + keys: Vec, }, /// Scalar function call e.g. 
`LEFT(foo, 5)` Function(Function), @@ -1028,10 +814,9 @@ pub enum Expr { /// not `< 0` nor `1, 2, 3` as allowed in a `` per /// Case { - case_token: AttachedToken, - end_token: AttachedToken, operand: Option>, - conditions: Vec, + conditions: Vec, + results: Vec, else_result: Option>, }, /// An exists expression `[ NOT ] EXISTS(SELECT ...)`, used in expressions like @@ -1051,14 +836,12 @@ pub enum Expr { Rollup(Vec>), /// ROW / TUPLE a single value, such as `SELECT (1, 2)` Tuple(Vec), - /// `Struct` literal expression + /// `BigQuery` specific `Struct` literal expression [1] /// Syntax: /// ```sql /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ]) - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type) - /// [Databricks](https://docs.databricks.com/en/sql/language-manual/functions/struct.html) /// ``` + /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type Struct { /// Struct values. values: Vec, @@ -1092,6 +875,11 @@ pub enum Expr { /// ``` /// [1]: https://duckdb.org/docs/sql/data_types/map#creating-maps Map(Map), + /// An access of nested data using subscript syntax, for example `array[2]`. + Subscript { + expr: Box, + subscript: Box, + }, /// An array expression e.g. `ARRAY[1, 2]` Array(Array), /// An interval expression e.g. `INTERVAL '1' YEAR` @@ -1108,16 +896,16 @@ pub enum Expr { /// [(1)]: https://dev.mysql.com/doc/refman/8.0/en/fulltext-search.html#function_match MatchAgainst { /// `(, , ...)`. - columns: Vec, + columns: Vec, /// ``. match_value: Value, /// `` opt_search_modifier: Option, }, - Wildcard(AttachedToken), + Wildcard, /// Qualified wildcard, e.g. `alias.*` or `schema.table.*`. /// (Same caveats apply to `QualifiedWildcard` as to `Wildcard`.) - QualifiedWildcard(ObjectName, AttachedToken), + QualifiedWildcard(ObjectName), /// Some dialects support an older syntax for outer joins where columns are /// marked with the `(+)` operator in the WHERE clause, for example: /// @@ -1142,19 +930,8 @@ pub enum Expr { /// param -> expr | (param1, ...) -> expr /// ``` /// - /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/functions#higher-order-functions---operator-and-lambdaparams-expr-function) - /// [Databricks](https://docs.databricks.com/en/sql/language-manual/sql-ref-lambda-functions.html) - /// [DuckDb](https://duckdb.org/docs/sql/functions/lambda.html) + /// See . Lambda(LambdaFunction), - /// Checks membership of a value in a JSON array - MemberOf(MemberOf), -} - -impl Expr { - /// Creates a new [`Expr::Value`] - pub fn value(value: impl Into) -> Self { - Expr::Value(value.into()) - } } /// The contents inside the `[` and `]` in a subscript expression. @@ -1219,27 +996,6 @@ impl fmt::Display for Subscript { } } -/// An element of a [`Expr::CompoundFieldAccess`]. -/// It can be an expression or a subscript. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum AccessExpr { - /// Accesses a field using dot notation, e.g. `foo.bar.baz`. - Dot(Expr), - /// Accesses a field or array element using bracket notation, e.g. `foo['bar']`. - Subscript(Subscript), -} - -impl fmt::Display for AccessExpr { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - AccessExpr::Dot(expr) => write!(f, ".{expr}"), - AccessExpr::Subscript(subscript) => write!(f, "[{subscript}]"), - } - } -} - /// A lambda function. 
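On the "+" side of the hunk above, `Expr::Case` stores its WHEN conditions and THEN results as two parallel vectors (the `Display` impl later in this diff zips them when printing). A sketch of walking those arms by position; only the variant and field names shown in the hunk are assumed, the helper itself is illustrative:

```rust
use sqlparser::ast::Expr;

/// Pair each WHEN condition with its THEN result by position.
fn case_arms(expr: &Expr) -> Vec<(&Expr, &Expr)> {
    match expr {
        Expr::Case { conditions, results, .. } => {
            conditions.iter().zip(results.iter()).collect()
        }
        _ => Vec::new(),
    }
}
```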
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -1263,7 +1019,7 @@ impl fmt::Display for LambdaFunction { /// `OneOrManyWithParens` implements `Deref` and `IntoIterator`, /// so you can call slice methods on it and iterate over items /// # Examples -/// Accessing as a slice: +/// Acessing as a slice: /// ``` /// # use sqlparser::ast::OneOrManyWithParens; /// let one = OneOrManyWithParens::One("a"); @@ -1432,20 +1188,15 @@ impl fmt::Display for CastFormat { } impl fmt::Display for Expr { - #[cfg_attr(feature = "recursive-protection", recursive::recursive)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Expr::Identifier(s) => write!(f, "{s}"), - Expr::Wildcard(_) => f.write_str("*"), - Expr::QualifiedWildcard(prefix, _) => write!(f, "{prefix}.*"), - Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")), - Expr::CompoundFieldAccess { root, access_chain } => { - write!(f, "{root}")?; - for field in access_chain { - write!(f, "{field}")?; - } - Ok(()) + Expr::MapAccess { column, keys } => { + write!(f, "{column}{}", display_separated(keys, "")) } + Expr::Wildcard => f.write_str("*"), + Expr::QualifiedWildcard(prefix) => write!(f, "{}.*", prefix), + Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")), Expr::IsTrue(ast) => write!(f, "{ast} IS TRUE"), Expr::IsNotTrue(ast) => write!(f, "{ast} IS NOT TRUE"), Expr::IsFalse(ast) => write!(f, "{ast} IS FALSE"), @@ -1510,7 +1261,7 @@ impl fmt::Display for Expr { } => match escape_char { Some(ch) => write!( f, - "{} {}LIKE {}{} ESCAPE {}", + "{} {}LIKE {}{} ESCAPE '{}'", expr, if *negated { "NOT " } else { "" }, if *any { "ANY " } else { "" }, @@ -1535,7 +1286,7 @@ impl fmt::Display for Expr { } => match escape_char { Some(ch) => write!( f, - "{} {}ILIKE {}{} ESCAPE {}", + "{} {}ILIKE {}{} ESCAPE '{}'", expr, if *negated { "NOT " } else { "" }, if *any { "ANY" } else { "" }, @@ -1564,24 +1315,6 @@ impl fmt::Display for Expr { if *regexp { "REGEXP" } else { "RLIKE" }, pattern ), - Expr::IsNormalized { - expr, - form, - negated, - } => { - let not_ = if *negated { "NOT " } else { "" }; - if form.is_none() { - write!(f, "{expr} IS {not_}NORMALIZED") - } else { - write!( - f, - "{} IS {}{} NORMALIZED", - expr, - not_, - form.as_ref().unwrap() - ) - } - } Expr::SimilarTo { negated, expr, @@ -1590,7 +1323,7 @@ impl fmt::Display for Expr { } => match escape_char { Some(ch) => write!( f, - "{} {}SIMILAR TO {} ESCAPE {}", + "{} {}SIMILAR TO {} ESCAPE '{}'", expr, if *negated { "NOT " } else { "" }, pattern, @@ -1635,15 +1368,7 @@ impl fmt::Display for Expr { Expr::UnaryOp { op, expr } => { if op == &UnaryOperator::PGPostfixFactorial { write!(f, "{expr}{op}") - } else if matches!( - op, - UnaryOperator::Not - | UnaryOperator::Hash - | UnaryOperator::AtDashAt - | UnaryOperator::DoubleAt - | UnaryOperator::QuestionDash - | UnaryOperator::QuestionPipe - ) { + } else if op == &UnaryOperator::Not { write!(f, "{op} {expr}") } else { write!(f, "{op}{expr}") @@ -1733,36 +1458,30 @@ impl fmt::Display for Expr { Expr::Collate { expr, collation } => write!(f, "{expr} COLLATE {collation}"), Expr::Nested(ast) => write!(f, "({ast})"), Expr::Value(v) => write!(f, "{v}"), - Expr::Prefixed { prefix, value } => write!(f, "{prefix} {value}"), + Expr::IntroducedString { introducer, value } => write!(f, "{introducer} {value}"), Expr::TypedString { data_type, value } => { write!(f, "{data_type}")?; - write!(f, " {value}") + write!(f, " '{}'", 
&value::escape_single_quote_string(value)) } - Expr::Function(fun) => fun.fmt(f), + Expr::Function(fun) => write!(f, "{fun}"), Expr::Case { - case_token: _, - end_token: _, operand, conditions, + results, else_result, } => { - f.write_str("CASE")?; + write!(f, "CASE")?; if let Some(operand) = operand { - f.write_str(" ")?; - operand.fmt(f)?; + write!(f, " {operand}")?; } - for when in conditions { - SpaceOrNewline.fmt(f)?; - Indent(when).fmt(f)?; + for (c, r) in conditions.iter().zip(results) { + write!(f, " WHEN {c} THEN {r}")?; } + if let Some(else_result) = else_result { - SpaceOrNewline.fmt(f)?; - Indent("ELSE").fmt(f)?; - SpaceOrNewline.fmt(f)?; - Indent(Indent(else_result)).fmt(f)?; + write!(f, " ELSE {else_result}")?; } - SpaceOrNewline.fmt(f)?; - f.write_str("END") + write!(f, " END") } Expr::Exists { subquery, negated } => write!( f, @@ -1814,13 +1533,8 @@ impl fmt::Display for Expr { substring_from, substring_for, special, - shorthand, } => { - f.write_str("SUBSTR")?; - if !*shorthand { - f.write_str("ING")?; - } - write!(f, "({expr}")?; + write!(f, "SUBSTRING({expr}")?; if let Some(from_part) = substring_from { if *special { write!(f, ", {from_part}")?; @@ -1893,7 +1607,7 @@ impl fmt::Display for Expr { } } Expr::Named { expr, name } => { - write!(f, "{expr} AS {name}") + write!(f, "{} AS {}", expr, name) } Expr::Dictionary(fields) => { write!(f, "{{{}}}", display_comma_separated(fields)) @@ -1901,12 +1615,21 @@ impl fmt::Display for Expr { Expr::Map(map) => { write!(f, "{map}") } + Expr::Subscript { + expr, + subscript: key, + } => { + write!(f, "{expr}[{key}]") + } Expr::Array(set) => { write!(f, "{set}") } Expr::JsonAccess { value, path } => { write!(f, "{value}{path}") } + Expr::CompositeAccess { expr, key } => { + write!(f, "{expr}.{key}") + } Expr::AtTimeZone { timestamp, time_zone, @@ -1936,7 +1659,6 @@ impl fmt::Display for Expr { } Expr::Prior(expr) => write!(f, "PRIOR {expr}"), Expr::Lambda(lambda) => write!(f, "{lambda}"), - Expr::MemberOf(member_of) => write!(f, "{member_of}"), } } } @@ -1952,14 +1674,8 @@ pub enum WindowType { impl Display for WindowType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - WindowType::WindowSpec(spec) => { - f.write_str("(")?; - NewLine.fmt(f)?; - Indent(spec).fmt(f)?; - NewLine.fmt(f)?; - f.write_str(")") - } - WindowType::NamedWindow(name) => name.fmt(f), + WindowType::WindowSpec(spec) => write!(f, "({})", spec), + WindowType::NamedWindow(name) => write!(f, "{}", name), } } } @@ -1987,19 +1703,14 @@ pub struct WindowSpec { impl fmt::Display for WindowSpec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut is_first = true; + let mut delim = ""; if let Some(window_name) = &self.window_name { - if !is_first { - SpaceOrNewline.fmt(f)?; - } - is_first = false; + delim = " "; write!(f, "{window_name}")?; } if !self.partition_by.is_empty() { - if !is_first { - SpaceOrNewline.fmt(f)?; - } - is_first = false; + f.write_str(delim)?; + delim = " "; write!( f, "PARTITION BY {}", @@ -2007,16 +1718,12 @@ impl fmt::Display for WindowSpec { )?; } if !self.order_by.is_empty() { - if !is_first { - SpaceOrNewline.fmt(f)?; - } - is_first = false; + f.write_str(delim)?; + delim = " "; write!(f, "ORDER BY {}", display_comma_separated(&self.order_by))?; } if let Some(window_frame) = &self.window_frame { - if !is_first { - SpaceOrNewline.fmt(f)?; - } + f.write_str(delim)?; if let Some(end_bound) = &window_frame.end_bound { write!( f, @@ -2177,10 +1884,6 @@ pub enum CommentObject { Column, Table, Extension, - Schema, 
- Database, - User, - Role, } impl fmt::Display for CommentObject { @@ -2189,10 +1892,6 @@ impl fmt::Display for CommentObject { CommentObject::Column => f.write_str("COLUMN"), CommentObject::Table => f.write_str("TABLE"), CommentObject::Extension => f.write_str("EXTENSION"), - CommentObject::Schema => f.write_str("SCHEMA"), - CommentObject::Database => f.write_str("DATABASE"), - CommentObject::User => f.write_str("USER"), - CommentObject::Role => f.write_str("ROLE"), } } } @@ -2205,345 +1904,6 @@ pub enum Password { NullPassword, } -/// A `CASE` statement. -/// -/// Examples: -/// ```sql -/// CASE -/// WHEN EXISTS(SELECT 1) -/// THEN SELECT 1 FROM T; -/// WHEN EXISTS(SELECT 2) -/// THEN SELECT 1 FROM U; -/// ELSE -/// SELECT 1 FROM V; -/// END CASE; -/// ``` -/// -/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#case_search_expression) -/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/case) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CaseStatement { - /// The `CASE` token that starts the statement. - pub case_token: AttachedToken, - pub match_expr: Option, - pub when_blocks: Vec, - pub else_block: Option, - /// The last token of the statement (`END` or `CASE`). - pub end_case_token: AttachedToken, -} - -impl fmt::Display for CaseStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let CaseStatement { - case_token: _, - match_expr, - when_blocks, - else_block, - end_case_token: AttachedToken(end), - } = self; - - write!(f, "CASE")?; - - if let Some(expr) = match_expr { - write!(f, " {expr}")?; - } - - if !when_blocks.is_empty() { - write!(f, " {}", display_separated(when_blocks, " "))?; - } - - if let Some(else_block) = else_block { - write!(f, " {else_block}")?; - } - - write!(f, " END")?; - - if let Token::Word(w) = &end.token { - if w.keyword == Keyword::CASE { - write!(f, " CASE")?; - } - } - - Ok(()) - } -} - -/// An `IF` statement. -/// -/// Example (BigQuery or Snowflake): -/// ```sql -/// IF TRUE THEN -/// SELECT 1; -/// SELECT 2; -/// ELSEIF TRUE THEN -/// SELECT 3; -/// ELSE -/// SELECT 4; -/// END IF -/// ``` -/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#if) -/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/if) -/// -/// Example (MSSQL): -/// ```sql -/// IF 1=1 SELECT 1 ELSE SELECT 2 -/// ``` -/// [MSSQL](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/if-else-transact-sql?view=sql-server-ver16) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct IfStatement { - pub if_block: ConditionalStatementBlock, - pub elseif_blocks: Vec, - pub else_block: Option, - pub end_token: Option, -} - -impl fmt::Display for IfStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let IfStatement { - if_block, - elseif_blocks, - else_block, - end_token, - } = self; - - write!(f, "{if_block}")?; - - for elseif_block in elseif_blocks { - write!(f, " {elseif_block}")?; - } - - if let Some(else_block) = else_block { - write!(f, " {else_block}")?; - } - - if let Some(AttachedToken(end_token)) = end_token { - write!(f, " END {end_token}")?; - } - - Ok(()) - } -} - -/// A `WHILE` statement. 
-/// -/// Example: -/// ```sql -/// WHILE @@FETCH_STATUS = 0 -/// BEGIN -/// FETCH NEXT FROM c1 INTO @var1, @var2; -/// END -/// ``` -/// -/// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/while-transact-sql) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct WhileStatement { - pub while_block: ConditionalStatementBlock, -} - -impl fmt::Display for WhileStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let WhileStatement { while_block } = self; - write!(f, "{while_block}")?; - Ok(()) - } -} - -/// A block within a [Statement::Case] or [Statement::If] or [Statement::While]-like statement -/// -/// Example 1: -/// ```sql -/// WHEN EXISTS(SELECT 1) THEN SELECT 1; -/// ``` -/// -/// Example 2: -/// ```sql -/// IF TRUE THEN SELECT 1; SELECT 2; -/// ``` -/// -/// Example 3: -/// ```sql -/// ELSE SELECT 1; SELECT 2; -/// ``` -/// -/// Example 4: -/// ```sql -/// WHILE @@FETCH_STATUS = 0 -/// BEGIN -/// FETCH NEXT FROM c1 INTO @var1, @var2; -/// END -/// ``` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ConditionalStatementBlock { - pub start_token: AttachedToken, - pub condition: Option, - pub then_token: Option, - pub conditional_statements: ConditionalStatements, -} - -impl ConditionalStatementBlock { - pub fn statements(&self) -> &Vec { - self.conditional_statements.statements() - } -} - -impl fmt::Display for ConditionalStatementBlock { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let ConditionalStatementBlock { - start_token: AttachedToken(start_token), - condition, - then_token, - conditional_statements, - } = self; - - write!(f, "{start_token}")?; - - if let Some(condition) = condition { - write!(f, " {condition}")?; - } - - if then_token.is_some() { - write!(f, " THEN")?; - } - - if !conditional_statements.statements().is_empty() { - write!(f, " {conditional_statements}")?; - } - - Ok(()) - } -} - -/// A list of statements in a [ConditionalStatementBlock]. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ConditionalStatements { - /// SELECT 1; SELECT 2; SELECT 3; ... - Sequence { statements: Vec }, - /// BEGIN SELECT 1; SELECT 2; SELECT 3; ... END - BeginEnd(BeginEndStatements), -} - -impl ConditionalStatements { - pub fn statements(&self) -> &Vec { - match self { - ConditionalStatements::Sequence { statements } => statements, - ConditionalStatements::BeginEnd(bes) => &bes.statements, - } - } -} - -impl fmt::Display for ConditionalStatements { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ConditionalStatements::Sequence { statements } => { - if !statements.is_empty() { - format_statement_list(f, statements)?; - } - Ok(()) - } - ConditionalStatements::BeginEnd(bes) => write!(f, "{bes}"), - } - } -} - -/// Represents a list of statements enclosed within `BEGIN` and `END` keywords. 
-/// Example: -/// ```sql -/// BEGIN -/// SELECT 1; -/// SELECT 2; -/// END -/// ``` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct BeginEndStatements { - pub begin_token: AttachedToken, - pub statements: Vec, - pub end_token: AttachedToken, -} - -impl fmt::Display for BeginEndStatements { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let BeginEndStatements { - begin_token: AttachedToken(begin_token), - statements, - end_token: AttachedToken(end_token), - } = self; - - if begin_token.token != Token::EOF { - write!(f, "{begin_token} ")?; - } - if !statements.is_empty() { - format_statement_list(f, statements)?; - } - if end_token.token != Token::EOF { - write!(f, " {end_token}")?; - } - Ok(()) - } -} - -/// A `RAISE` statement. -/// -/// Examples: -/// ```sql -/// RAISE USING MESSAGE = 'error'; -/// -/// RAISE myerror; -/// ``` -/// -/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#raise) -/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/raise) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct RaiseStatement { - pub value: Option, -} - -impl fmt::Display for RaiseStatement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let RaiseStatement { value } = self; - - write!(f, "RAISE")?; - if let Some(value) = value { - write!(f, " {value}")?; - } - - Ok(()) - } -} - -/// Represents the error value of a [RaiseStatement]. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum RaiseStatementValue { - /// `RAISE USING MESSAGE = 'error'` - UsingMessage(Expr), - /// `RAISE myerror` - Expr(Expr), -} - -impl fmt::Display for RaiseStatementValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - RaiseStatementValue::Expr(expr) => write!(f, "{expr}"), - RaiseStatementValue::UsingMessage(expr) => write!(f, "USING MESSAGE = {expr}"), - } - } -} - /// Represents an expression assignment within a variable `DECLARE` statement. /// /// Examples: @@ -2613,11 +1973,10 @@ impl fmt::Display for DeclareAssignment { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum DeclareType { - /// Cursor variable type. e.g. [Snowflake] [PostgreSQL] [MsSql] + /// Cursor variable type. e.g. [Snowflake] [Postgres] /// /// [Snowflake]: https://docs.snowflake.com/en/developer-guide/snowflake-scripting/cursors#declaring-a-cursor - /// [PostgreSQL]: https://www.postgresql.org/docs/current/plpgsql-cursors.html - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-cursor-transact-sql + /// [Postgres]: https://www.postgresql.org/docs/current/plpgsql-cursors.html Cursor, /// Result set variable type. [Snowflake] @@ -2656,7 +2015,7 @@ impl fmt::Display for DeclareType { } /// A `DECLARE` statement. 
-/// [PostgreSQL] [Snowflake] [BigQuery] +/// [Postgres] [Snowflake] [BigQuery] /// /// Examples: /// ```sql @@ -2664,7 +2023,7 @@ impl fmt::Display for DeclareType { /// DECLARE liahona CURSOR FOR SELECT * FROM films; /// ``` /// -/// [PostgreSQL]: https://www.postgresql.org/docs/current/sql-declare.html +/// [Postgres]: https://www.postgresql.org/docs/current/sql-declare.html /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -2786,18 +2145,6 @@ pub enum CreateTableOptions { /// /// Options(Vec), - - /// Plain options, options which are not part on any declerative statement e.g. WITH/OPTIONS/... - /// - Plain(Vec), - - TableProperties(Vec), -} - -impl Default for CreateTableOptions { - fn default() -> Self { - Self::None - } } impl fmt::Display for CreateTableOptions { @@ -2809,12 +2156,6 @@ impl fmt::Display for CreateTableOptions { CreateTableOptions::Options(options) => { write!(f, "OPTIONS({})", display_comma_separated(options)) } - CreateTableOptions::TableProperties(options) => { - write!(f, "TBLPROPERTIES ({})", display_comma_separated(options)) - } - CreateTableOptions::Plain(options) => { - write!(f, "{}", display_separated(options, " ")) - } CreateTableOptions::None => Ok(()), } } @@ -2878,202 +2219,6 @@ pub enum CreatePolicyCommand { Delete, } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum Set { - /// SQL Standard-style - /// SET a = 1; - SingleAssignment { - scope: Option, - hivevar: bool, - variable: ObjectName, - values: Vec, - }, - /// Snowflake-style - /// SET (a, b, ..) = (1, 2, ..); - ParenthesizedAssignments { - variables: Vec, - values: Vec, - }, - /// MySQL-style - /// SET a = 1, b = 2, ..; - MultipleAssignments { assignments: Vec }, - /// MS-SQL session - /// - /// See - SetSessionParam(SetSessionParamKind), - /// ```sql - /// SET [ SESSION | LOCAL ] ROLE role_name - /// ``` - /// - /// Sets session state. Examples: [ANSI][1], [Postgresql][2], [MySQL][3], and [Oracle][4] - /// - /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#set-role-statement - /// [2]: https://www.postgresql.org/docs/14/sql-set-role.html - /// [3]: https://dev.mysql.com/doc/refman/8.0/en/set-role.html - /// [4]: https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10004.htm - SetRole { - /// Non-ANSI optional identifier to inform if the role is defined inside the current session (`SESSION`) or transaction (`LOCAL`). - context_modifier: Option, - /// Role name. If NONE is specified, then the current role name is removed. - role_name: Option, - }, - /// ```sql - /// SET TIME ZONE - /// ``` - /// - /// Note: this is a PostgreSQL-specific statements - /// `SET TIME ZONE ` is an alias for `SET timezone TO ` in PostgreSQL - /// However, we allow it for all dialects. - SetTimeZone { local: bool, value: Expr }, - /// ```sql - /// SET NAMES 'charset_name' [COLLATE 'collation_name'] - /// ``` - SetNames { - charset_name: Ident, - collation_name: Option, - }, - /// ```sql - /// SET NAMES DEFAULT - /// ``` - /// - /// Note: this is a MySQL-specific statement. - SetNamesDefault {}, - /// ```sql - /// SET TRANSACTION ... 
- /// ``` - SetTransaction { - modes: Vec, - snapshot: Option, - session: bool, - }, -} - -impl Display for Set { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::ParenthesizedAssignments { variables, values } => write!( - f, - "SET ({}) = ({})", - display_comma_separated(variables), - display_comma_separated(values) - ), - Self::MultipleAssignments { assignments } => { - write!(f, "SET {}", display_comma_separated(assignments)) - } - Self::SetRole { - context_modifier, - role_name, - } => { - let role_name = role_name.clone().unwrap_or_else(|| Ident::new("NONE")); - write!( - f, - "SET {modifier}ROLE {role_name}", - modifier = context_modifier.map(|m| format!("{m}")).unwrap_or_default() - ) - } - Self::SetSessionParam(kind) => write!(f, "SET {kind}"), - Self::SetTransaction { - modes, - snapshot, - session, - } => { - if *session { - write!(f, "SET SESSION CHARACTERISTICS AS TRANSACTION")?; - } else { - write!(f, "SET TRANSACTION")?; - } - if !modes.is_empty() { - write!(f, " {}", display_comma_separated(modes))?; - } - if let Some(snapshot_id) = snapshot { - write!(f, " SNAPSHOT {snapshot_id}")?; - } - Ok(()) - } - Self::SetTimeZone { local, value } => { - f.write_str("SET ")?; - if *local { - f.write_str("LOCAL ")?; - } - write!(f, "TIME ZONE {value}") - } - Self::SetNames { - charset_name, - collation_name, - } => { - write!(f, "SET NAMES {charset_name}")?; - - if let Some(collation) = collation_name { - f.write_str(" COLLATE ")?; - f.write_str(collation)?; - }; - - Ok(()) - } - Self::SetNamesDefault {} => { - f.write_str("SET NAMES DEFAULT")?; - - Ok(()) - } - Set::SingleAssignment { - scope, - hivevar, - variable, - values, - } => { - write!( - f, - "SET {}{}{} = {}", - scope.map(|s| format!("{s}")).unwrap_or_default(), - if *hivevar { "HIVEVAR:" } else { "" }, - variable, - display_comma_separated(values) - ) - } - } - } -} - -/// Convert a `Set` into a `Statement`. -/// Convenience function, instead of writing `Statement::Set(Set::Set...{...})` -impl From for Statement { - fn from(set: Set) -> Self { - Statement::Set(set) - } -} - -/// A representation of a `WHEN` arm with all the identifiers catched and the statements to execute -/// for the arm. -/// -/// Snowflake: -/// BigQuery: -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ExceptionWhen { - pub idents: Vec, - pub statements: Vec, -} - -impl Display for ExceptionWhen { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "WHEN {idents} THEN", - idents = display_separated(&self.idents, " OR ") - )?; - - if !self.statements.is_empty() { - write!(f, " ")?; - format_statement_list(f, &self.statements)?; - } - - Ok(()) - } -} - /// A top-level statement (SELECT, INSERT, CREATE, etc.) 
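The doc line above introduces the top-level `Statement` enum that every parsed SQL string is turned into. A small sketch of matching on one of its variants; `Parser::parse_sql`, `GenericDialect`, and the `Statement::Query` variant are assumed from the crate's public API rather than from this hunk:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

/// Returns true when the first parsed statement is a plain query.
fn is_query(sql: &str) -> bool {
    match Parser::parse_sql(&GenericDialect {}, sql) {
        Ok(stmts) => matches!(stmts.first(), Some(Statement::Query(_))),
        Err(_) => false,
    }
}
```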
#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -3097,9 +2242,7 @@ pub enum Statement { cache_metadata: bool, noscan: bool, compute_statistics: bool, - has_table_keyword: bool, }, - Set(Set), /// ```sql /// TRUNCATE /// ``` @@ -3110,11 +2253,14 @@ pub enum Statement { /// TABLE - optional keyword; table: bool, /// Postgres-specific option + /// [ TRUNCATE TABLE ONLY ] + only: bool, + /// Postgres-specific option /// [ RESTART IDENTITY | CONTINUE IDENTITY ] identity: Option, /// Postgres-specific option /// [ CASCADE | RESTRICT ] - cascade: Option, + cascade: Option, /// ClickHouse-specific option /// [ ON CLUSTER cluster_name ] /// @@ -3161,14 +2307,6 @@ pub enum Statement { file_format: Option, source: Box, }, - /// A `CASE` statement. - Case(CaseStatement), - /// An `IF` statement. - If(IfStatement), - /// A `WHILE` statement. - While(WhileStatement), - /// A `RAISE` statement. - Raise(RaiseStatement), /// ```sql /// CALL /// ``` @@ -3191,38 +2329,26 @@ pub enum Statement { values: Vec>, }, /// ```sql - /// COPY INTO | + /// COPY INTO /// ``` - /// See: - /// - /// - /// + /// See /// Copy Into syntax available for Snowflake is different than the one implemented in /// Postgres. Although they share common prefix, it is reasonable to implement them /// in different enums. This can be refactored later once custom dialects /// are allowed to have custom Statements. CopyIntoSnowflake { - kind: CopyIntoSnowflakeKind, into: ObjectName, - into_columns: Option>, - from_obj: Option, - from_obj_alias: Option, + from_stage: ObjectName, + from_stage_alias: Option, stage_params: StageParamsObject, - from_transformations: Option>, - from_query: Option>, + from_transformations: Option>, files: Option>, pattern: Option, - file_format: KeyValueOptions, - copy_options: KeyValueOptions, + file_format: DataLoadingOptions, + copy_options: DataLoadingOptions, validation_mode: Option, - partition: Option>, }, /// ```sql - /// OPEN cursor_name - /// ``` - /// Opens a cursor. - Open(OpenStatement), - /// ```sql /// CLOSE /// ``` /// Closes the portal underlying an open cursor. 
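The `TRUNCATE` hunk above restores the Postgres-specific `only` flag next to the existing `table` and `cascade` fields. A sketch of reading those fields back off the AST; the field names come from the hunk, and the `Debug` formatting of the cascade option is an assumption based on the derives used throughout this file:

```rust
use sqlparser::ast::Statement;

/// Print the Postgres-specific TRUNCATE options carried on the AST node.
fn describe_truncate(stmt: &Statement) {
    if let Statement::Truncate {
        table, only, cascade, ..
    } = stmt
    {
        println!("TABLE keyword: {table}, ONLY: {only}, cascade: {cascade:?}");
    }
}
```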
@@ -3239,13 +2365,11 @@ pub enum Statement { /// Column assignments assignments: Vec, /// Table which provide value to be set - from: Option, + from: Option, /// WHERE selection: Option, /// RETURNING returning: Option>, - /// SQLite-specific conflict resolution clause - or: Option, }, /// ```sql /// DELETE @@ -3255,10 +2379,6 @@ pub enum Statement { /// CREATE VIEW /// ``` CreateView { - /// True if this is a `CREATE OR ALTER VIEW` statement - /// - /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-view-transact-sql) - or_alter: bool, or_replace: bool, materialized: bool, /// View name @@ -3279,8 +2399,6 @@ pub enum Statement { /// if not None, has Clickhouse `TO` clause, specify the table into which to insert results /// to: Option, - /// MySQL: Optional parameters for the view algorithm, definer, and security context - params: Option, }, /// ```sql /// CREATE TABLE @@ -3304,7 +2422,7 @@ pub enum Statement { /// ```sql /// CREATE ROLE /// ``` - /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createrole.html) + /// See [postgres](https://www.postgresql.org/docs/current/sql-createrole.html) CreateRole { names: Vec, if_not_exists: bool, @@ -3330,7 +2448,7 @@ pub enum Statement { /// ```sql /// CREATE SECRET /// ``` - /// See [DuckDB](https://duckdb.org/docs/sql/statements/create_secret.html) + /// See [duckdb](https://duckdb.org/docs/sql/statements/create_secret.html) CreateSecret { or_replace: bool, temporary: Option, @@ -3340,8 +2458,6 @@ pub enum Statement { secret_type: Ident, options: Vec, }, - /// A `CREATE SERVER` statement. - CreateServer(CreateServerStatement), /// ```sql /// CREATE POLICY /// ``` @@ -3357,11 +2473,6 @@ pub enum Statement { with_check: Option, }, /// ```sql - /// CREATE CONNECTOR - /// ``` - /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) - CreateConnector(CreateConnector), - /// ```sql /// ALTER TABLE /// ``` AlterTable { @@ -3376,9 +2487,6 @@ pub enum Statement { /// For example: `ALTER TABLE table_name ON CLUSTER cluster_name ADD COLUMN c UInt32` /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/update) on_cluster: Option, - /// Snowflake "ICEBERG" clause for Iceberg tables - /// - iceberg: bool, }, /// ```sql /// ALTER INDEX @@ -3399,11 +2507,6 @@ pub enum Statement { with_options: Vec, }, /// ```sql - /// ALTER TYPE - /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-altertype.html) - /// ``` - AlterType(AlterType), - /// ```sql /// ALTER ROLE /// ``` AlterRole { @@ -3421,31 +2524,6 @@ pub enum Statement { operation: AlterPolicyOperation, }, /// ```sql - /// ALTER CONNECTOR connector_name SET DCPROPERTIES(property_name=property_value, ...); - /// or - /// ALTER CONNECTOR connector_name SET URL new_url; - /// or - /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; - /// ``` - /// (Hive-specific) - AlterConnector { - name: Ident, - properties: Option>, - url: Option, - owner: Option, - }, - /// ```sql - /// ALTER SESSION SET sessionParam - /// ALTER SESSION UNSET [ , , ... 
] - /// ``` - /// See - AlterSession { - /// true is to set for the session parameters, false is to unset - set: bool, - /// The session parameters to set or unset - session_params: KeyValueOptions, - }, - /// ```sql /// ATTACH DATABASE 'path/to/file' AS alias /// ``` /// (SQLite-specific) @@ -3503,9 +2581,6 @@ pub enum Statement { purge: bool, /// MySQL-specific "TEMPORARY" keyword temporary: bool, - /// MySQL-specific drop index syntax, which requires table specification - /// See - table: Option, }, /// ```sql /// DROP FUNCTION @@ -3515,17 +2590,9 @@ pub enum Statement { /// One or more function to drop func_desc: Vec, /// `CASCADE` or `RESTRICT` - drop_behavior: Option, + option: Option, }, /// ```sql - /// DROP DOMAIN - /// ``` - /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-dropdomain.html) - /// - /// DROP DOMAIN [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ] - /// - DropDomain(DropDomain), - /// ```sql /// DROP PROCEDURE /// ``` DropProcedure { @@ -3533,7 +2600,7 @@ pub enum Statement { /// One or more function to drop proc_desc: Vec, /// `CASCADE` or `RESTRICT` - drop_behavior: Option, + option: Option, }, /// ```sql /// DROP SECRET @@ -3552,15 +2619,7 @@ pub enum Statement { if_exists: bool, name: Ident, table_name: ObjectName, - drop_behavior: Option, - }, - /// ```sql - /// DROP CONNECTOR - /// ``` - /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) - DropConnector { - if_exists: bool, - name: Ident, + option: Option, }, /// ```sql /// DECLARE @@ -3569,9 +2628,7 @@ pub enum Statement { /// /// Note: this is a PostgreSQL-specific statement, /// but may also compatible with other SQL. - Declare { - stmts: Vec, - }, + Declare { stmts: Vec }, /// ```sql /// CREATE EXTENSION [ IF NOT EXISTS ] extension_name /// [ WITH ] [ SCHEMA schema_name ] @@ -3588,18 +2645,6 @@ pub enum Statement { version: Option, }, /// ```sql - /// DROP EXTENSION [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ] - /// - /// Note: this is a PostgreSQL-specific statement. - /// https://www.postgresql.org/docs/current/sql-dropextension.html - /// ``` - DropExtension { - names: Vec, - if_exists: bool, - /// `CASCADE` or `RESTRICT` - cascade_or_restrict: Option, - }, - /// ```sql /// FETCH /// ``` /// Retrieve rows from a query using a cursor @@ -3610,7 +2655,6 @@ pub enum Statement { /// Cursor name name: Ident, direction: FetchDirection, - position: FetchPosition, /// Optional, It's possible to fetch rows form cursor to the table into: Option, }, @@ -3634,23 +2678,69 @@ pub enum Statement { /// /// Note: this is a PostgreSQL-specific statement, /// but may also compatible with other SQL. - Discard { - object_type: DiscardObject, + Discard { object_type: DiscardObject }, + /// ```sql + /// SET [ SESSION | LOCAL ] ROLE role_name + /// ``` + /// + /// Sets session state. Examples: [ANSI][1], [Postgresql][2], [MySQL][3], and [Oracle][4] + /// + /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#set-role-statement + /// [2]: https://www.postgresql.org/docs/14/sql-set-role.html + /// [3]: https://dev.mysql.com/doc/refman/8.0/en/set-role.html + /// [4]: https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10004.htm + SetRole { + /// Non-ANSI optional identifier to inform if the role is defined inside the current session (`SESSION`) or transaction (`LOCAL`). + context_modifier: ContextModifier, + /// Role name. If NONE is specified, then the current role name is removed. 
+ role_name: Option, }, + /// ```sql + /// SET = expression; + /// SET (variable[, ...]) = (expression[, ...]); + /// ``` + /// + /// Note: this is not a standard SQL statement, but it is supported by at + /// least MySQL and PostgreSQL. Not all MySQL-specific syntactic forms are + /// supported yet. + SetVariable { + local: bool, + hivevar: bool, + variables: OneOrManyWithParens, + value: Vec, + }, + /// ```sql + /// SET TIME ZONE + /// ``` + /// + /// Note: this is a PostgreSQL-specific statements + /// `SET TIME ZONE ` is an alias for `SET timezone TO ` in PostgreSQL + SetTimeZone { local: bool, value: Expr }, + /// ```sql + /// SET NAMES 'charset_name' [COLLATE 'collation_name'] + /// ``` + /// + /// Note: this is a MySQL-specific statement. + SetNames { + charset_name: String, + collation_name: Option, + }, + /// ```sql + /// SET NAMES DEFAULT + /// ``` + /// + /// Note: this is a MySQL-specific statement. + SetNamesDefault {}, /// `SHOW FUNCTIONS` /// /// Note: this is a Presto-specific statement. - ShowFunctions { - filter: Option, - }, + ShowFunctions { filter: Option }, /// ```sql /// SHOW /// ``` /// /// Note: this is a PostgreSQL-specific statement. - ShowVariable { - variable: Vec, - }, + ShowVariable { variable: Vec }, /// ```sql /// SHOW [GLOBAL | SESSION] STATUS [LIKE 'pattern' | WHERE expr] /// ``` @@ -3683,60 +2773,48 @@ pub enum Statement { /// ```sql /// SHOW COLUMNS /// ``` + /// + /// Note: this is a MySQL-specific statement. ShowColumns { extended: bool, full: bool, - show_options: ShowStatementOptions, + #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] + table_name: ObjectName, + filter: Option, }, /// ```sql - /// SHOW DATABASES + /// SHOW DATABASES [LIKE 'pattern'] /// ``` - ShowDatabases { - terse: bool, - history: bool, - show_options: ShowStatementOptions, - }, + ShowDatabases { filter: Option }, /// ```sql - /// SHOW SCHEMAS + /// SHOW SCHEMAS [LIKE 'pattern'] /// ``` - ShowSchemas { - terse: bool, - history: bool, - show_options: ShowStatementOptions, - }, - /// ```sql - /// SHOW OBJECTS LIKE 'line%' IN mydb.public - /// ``` - /// Snowflake-specific statement - /// - ShowObjects(ShowObjects), + ShowSchemas { filter: Option }, /// ```sql /// SHOW TABLES /// ``` ShowTables { - terse: bool, - history: bool, extended: bool, full: bool, - external: bool, - show_options: ShowStatementOptions, + clause: Option, + db_name: Option, + filter: Option, }, /// ```sql /// SHOW VIEWS /// ``` ShowViews { - terse: bool, materialized: bool, - show_options: ShowStatementOptions, + clause: Option, + db_name: Option, + filter: Option, }, /// ```sql /// SHOW COLLATION /// ``` /// /// Note: this is a MySQL-specific statement. - ShowCollation { - filter: Option, - }, + ShowCollation { filter: Option }, /// ```sql /// `USE ...` /// ``` @@ -3753,33 +2831,16 @@ pub enum Statement { StartTransaction { modes: Vec, begin: bool, - transaction: Option, + /// Only for SQLite modifier: Option, - /// List of statements belonging to the `BEGIN` block. - /// Example: - /// ```sql - /// BEGIN - /// SELECT 1; - /// SELECT 2; - /// END; - /// ``` - statements: Vec, - /// Exception handling with exception clauses. - /// Example: - /// ```sql - /// EXCEPTION - /// WHEN EXCEPTION_1 THEN - /// SELECT 2; - /// WHEN EXCEPTION_2 OR EXCEPTION_3 THEN - /// SELECT 3; - /// WHEN OTHER THEN - /// SELECT 4; - /// ``` - /// - /// - exception: Option>, - /// TRUE if the statement has an `END` keyword. - has_end_keyword: bool, + }, + /// ```sql + /// SET TRANSACTION ... 
+ /// ``` + SetTransaction { + modes: Vec, + snapshot: Option, + session: bool, }, /// ```sql /// COMMENT ON ... @@ -3797,17 +2858,7 @@ pub enum Statement { /// ```sql /// COMMIT [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ] /// ``` - /// If `end` is false - /// - /// ```sql - /// END [ TRY | CATCH ] - /// ``` - /// If `end` is true - Commit { - chain: bool, - end: bool, - modifier: Option, - }, + Commit { chain: bool }, /// ```sql /// ROLLBACK [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ] [ TO [ SAVEPOINT ] savepoint_name ] /// ``` @@ -3822,30 +2873,6 @@ pub enum Statement { /// ` | AUTHORIZATION | AUTHORIZATION ` schema_name: SchemaName, if_not_exists: bool, - /// Schema properties. - /// - /// ```sql - /// CREATE SCHEMA myschema WITH (key1='value1'); - /// ``` - /// - /// [Trino](https://trino.io/docs/current/sql/create-schema.html) - with: Option>, - /// Schema options. - /// - /// ```sql - /// CREATE SCHEMA myschema OPTIONS(key1='value1'); - /// ``` - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) - options: Option>, - /// Default collation specification for the schema. - /// - /// ```sql - /// CREATE SCHEMA myschema DEFAULT COLLATE 'und:ci'; - /// ``` - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) - default_collate_spec: Option, }, /// ```sql /// CREATE DATABASE @@ -3862,10 +2889,66 @@ pub enum Statement { /// /// Supported variants: /// 1. [Hive](https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction) - /// 2. [PostgreSQL](https://www.postgresql.org/docs/15/sql-createfunction.html) + /// 2. [Postgres](https://www.postgresql.org/docs/15/sql-createfunction.html) /// 3. [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) - /// 4. [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql) - CreateFunction(CreateFunction), + CreateFunction { + or_replace: bool, + temporary: bool, + if_not_exists: bool, + name: ObjectName, + args: Option>, + return_type: Option, + /// The expression that defines the function. + /// + /// Examples: + /// ```sql + /// AS ((SELECT 1)) + /// AS "console.log();" + /// ``` + function_body: Option, + /// Behavior attribute for the function + /// + /// IMMUTABLE | STABLE | VOLATILE + /// + /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) + behavior: Option, + /// CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT + /// + /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) + called_on_null: Option, + /// PARALLEL { UNSAFE | RESTRICTED | SAFE } + /// + /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) + parallel: Option, + /// USING ... (Hive only) + using: Option, + /// Language used in a UDF definition. + /// + /// Example: + /// ```sql + /// CREATE FUNCTION foo() LANGUAGE js AS "console.log();" + /// ``` + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_javascript_udf) + language: Option, + /// Determinism keyword used for non-sql UDF definitions. + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) + determinism_specifier: Option, + /// List of options for creating the function. 
+ /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) + options: Option>, + /// Connection resource for a remote function. + /// + /// Example: + /// ```sql + /// CREATE FUNCTION foo() + /// RETURNS FLOAT64 + /// REMOTE WITH CONNECTION us.myconnection + /// ``` + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_remote_function) + remote_connection: Option, + }, /// CREATE TRIGGER /// /// Examples: @@ -3878,12 +2961,7 @@ pub enum Statement { /// ``` /// /// Postgres: - /// SQL Server: CreateTrigger { - /// True if this is a `CREATE OR ALTER TRIGGER` statement - /// - /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql?view=sql-server-ver16#arguments) - or_alter: bool, /// The `OR REPLACE` clause is used to re-create the trigger if it already exists. /// /// Example: @@ -3944,9 +3022,7 @@ pub enum Statement { /// Triggering conditions condition: Option, /// Execute logic block - exec_body: Option, - /// For SQL dialects with statement(s) for a body - statements: Option, + exec_body: TriggerExecBody, /// The characteristic of the trigger, which include whether the trigger is `DEFERRABLE`, `INITIALLY DEFERRED`, or `INITIALLY IMMEDIATE`, characteristics: Option, }, @@ -3959,7 +3035,7 @@ pub enum Statement { DropTrigger { if_exists: bool, trigger_name: ObjectName, - table_name: Option, + table_name: ObjectName, /// `CASCADE` or `RESTRICT` option: Option, }, @@ -3970,8 +3046,7 @@ pub enum Statement { or_alter: bool, name: ObjectName, params: Option>, - language: Option, - body: ConditionalStatements, + body: Vec, }, /// ```sql /// CREATE MACRO @@ -3996,9 +3071,9 @@ pub enum Statement { if_not_exists: bool, name: ObjectName, stage_params: StageParamsObject, - directory_table_params: KeyValueOptions, - file_format: KeyValueOptions, - copy_options: KeyValueOptions, + directory_table_params: DataLoadingOptions, + file_format: DataLoadingOptions, + copy_options: DataLoadingOptions, comment: Option, }, /// ```sql @@ -4013,52 +3088,36 @@ pub enum Statement { /// ``` Grant { privileges: Privileges, - objects: Option, - grantees: Vec, + objects: GrantObjects, + grantees: Vec, with_grant_option: bool, - as_grantor: Option, granted_by: Option, - current_grants: Option, }, /// ```sql - /// DENY privileges ON object TO grantees - /// ``` - Deny(DenyStatement), - /// ```sql /// REVOKE privileges ON objects FROM grantees /// ``` Revoke { privileges: Privileges, - objects: Option, - grantees: Vec, + objects: GrantObjects, + grantees: Vec, granted_by: Option, - cascade: Option, + cascade: bool, }, /// ```sql /// DEALLOCATE [ PREPARE ] { name | ALL } /// ``` /// /// Note: this is a PostgreSQL-specific statement. - Deallocate { - name: Ident, - prepare: bool, - }, + Deallocate { name: Ident, prepare: bool }, /// ```sql - /// An `EXECUTE` statement + /// EXECUTE name [ ( parameter [, ...] ) ] [USING ] /// ``` /// - /// Postgres: - /// MSSQL: - /// BigQuery: - /// Snowflake: + /// Note: this is a PostgreSQL-specific statement. Execute { - name: Option, + name: Ident, parameters: Vec, - has_parentheses: bool, - /// Is this an `EXECUTE IMMEDIATE` - immediate: bool, - into: Vec, - using: Vec, + using: Vec, }, /// ```sql /// PREPARE name [ ( data_type [, ...] 
) ] AS statement @@ -4074,7 +3133,7 @@ pub enum Statement { /// KILL [CONNECTION | QUERY | MUTATION] /// ``` /// - /// See + /// See /// See Kill { modifier: Option, @@ -4114,9 +3173,6 @@ pub enum Statement { /// /// [SQLite](https://sqlite.org/lang_explain.html) query_plan: bool, - /// `EXPLAIN ESTIMATE` - /// [Clickhouse](https://clickhouse.com/docs/en/sql-reference/statements/explain#explain-estimate) - estimate: bool, /// A SQL query that specifies what to explain statement: Box, /// Optional output format of explain @@ -4128,15 +3184,11 @@ pub enum Statement { /// SAVEPOINT /// ``` /// Define a new savepoint within the current transaction - Savepoint { - name: Ident, - }, + Savepoint { name: Ident }, /// ```sql /// RELEASE [ SAVEPOINT ] savepoint_name /// ``` - ReleaseSavepoint { - name: Ident, - }, + ReleaseSavepoint { name: Ident }, /// A `MERGE` statement. /// /// ```sql @@ -4144,7 +3196,6 @@ pub enum Statement { /// ``` /// [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/merge) /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#merge_statement) - /// [MSSQL](https://learn.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql?view=sql-server-ver16) Merge { /// optional INTO keyword into: bool, @@ -4156,8 +3207,6 @@ pub enum Statement { on: Box, /// Specifies the actions to perform when values match or do not match. clauses: Vec, - // Specifies the output to save changes in MSSQL - output: Option, }, /// ```sql /// CACHE [ FLAG ] TABLE [ OPTIONS('K1' = 'V1', 'K2' = V2) ] [ AS ] [ ] @@ -4200,8 +3249,6 @@ pub enum Statement { sequence_options: Vec, owned_by: Option, }, - /// A `CREATE DOMAIN` statement. - CreateDomain(CreateDomain), /// ```sql /// CREATE TYPE /// ``` @@ -4221,9 +3268,7 @@ pub enum Statement { /// LOCK TABLES [READ [LOCAL] | [LOW_PRIORITY] WRITE] /// ``` /// Note: this is a MySQL-specific statement. See - LockTables { - tables: Vec, - }, + LockTables { tables: Vec }, /// ```sql /// UNLOCK TABLES /// ``` @@ -4257,148 +3302,20 @@ pub enum Statement { /// listen for a notification channel /// /// See Postgres - LISTEN { - channel: Ident, - }, - /// ```sql - /// UNLISTEN - /// ``` - /// stop listening for a notification - /// - /// See Postgres - UNLISTEN { - channel: Ident, - }, + LISTEN { channel: Ident }, /// ```sql /// NOTIFY channel [ , payload ] /// ``` - /// send a notification event together with an optional "payload" string to channel + /// send a notification event together with an optional “payload” string to channel /// /// See Postgres NOTIFY { channel: Ident, payload: Option, }, - /// ```sql - /// LOAD DATA [LOCAL] INPATH 'filepath' [OVERWRITE] INTO TABLE tablename - /// [PARTITION (partcol1=val1, partcol2=val2 ...)] - /// [INPUTFORMAT 'inputformat' SERDE 'serde'] - /// ``` - /// Loading files into tables - /// - /// See Hive - LoadData { - local: bool, - inpath: String, - overwrite: bool, - table_name: ObjectName, - partitioned: Option>, - table_format: Option, - }, - /// ```sql - /// Rename TABLE tbl_name TO new_tbl_name[, tbl_name2 TO new_tbl_name2] ... 
- /// ``` - /// Renames one or more tables - /// - /// See Mysql - RenameTable(Vec), - /// Snowflake `LIST` - /// See: - List(FileStagingCommand), - /// Snowflake `REMOVE` - /// See: - Remove(FileStagingCommand), - /// RaiseError (MSSQL) - /// RAISERROR ( { msg_id | msg_str | @local_variable } - /// { , severity , state } - /// [ , argument [ , ...n ] ] ) - /// [ WITH option [ , ...n ] ] - /// See - RaisError { - message: Box, - severity: Box, - state: Box, - arguments: Vec, - options: Vec, - }, - /// ```sql - /// PRINT msg_str | @local_variable | string_expr - /// ``` - /// - /// See: - Print(PrintStatement), - /// ```sql - /// RETURN [ expression ] - /// ``` - /// - /// See [ReturnStatement] - Return(ReturnStatement), -} - -/// ```sql -/// {COPY | REVOKE} CURRENT GRANTS -/// ``` -/// -/// - [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership#optional-parameters) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum CurrentGrantsKind { - CopyCurrentGrants, - RevokeCurrentGrants, -} - -impl fmt::Display for CurrentGrantsKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CurrentGrantsKind::CopyCurrentGrants => write!(f, "COPY CURRENT GRANTS"), - CurrentGrantsKind::RevokeCurrentGrants => write!(f, "REVOKE CURRENT GRANTS"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum RaisErrorOption { - Log, - NoWait, - SetError, -} - -impl fmt::Display for RaisErrorOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - RaisErrorOption::Log => write!(f, "LOG"), - RaisErrorOption::NoWait => write!(f, "NOWAIT"), - RaisErrorOption::SetError => write!(f, "SETERROR"), - } - } } impl fmt::Display for Statement { - /// Formats a SQL statement with support for pretty printing. - /// - /// When using the alternate flag (`{:#}`), the statement will be formatted with proper - /// indentation and line breaks. For example: - /// - /// ``` - /// # use sqlparser::dialect::GenericDialect; - /// # use sqlparser::parser::Parser; - /// let sql = "SELECT a, b FROM table_1"; - /// let ast = Parser::parse_sql(&GenericDialect, sql).unwrap(); - /// - /// // Regular formatting - /// assert_eq!(format!("{}", ast[0]), "SELECT a, b FROM table_1"); - /// - /// // Pretty printing - /// assert_eq!(format!("{:#}", ast[0]), - /// r#"SELECT - /// a, - /// b - /// FROM - /// table_1"#); - /// ``` // Clippy thinks this function is too complicated, but it is painful to // split up without extracting structs for each `Statement` variant. 
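The `Display` impl that begins below turns a `Statement` back into SQL text, so a parse / format round trip is possible (on the "+" side of this hunk only the plain `{}` formatting is documented; the pretty-printing doc example is removed). A sketch assuming the crate's `Parser::parse_sql` entry point:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

/// Parse, then re-serialize each statement via its `Display` impl.
fn roundtrip(sql: &str) -> String {
    let stmts = Parser::parse_sql(&GenericDialect {}, sql).expect("valid SQL");
    stmts
        .iter()
        .map(|s| s.to_string())
        .collect::<Vec<_>>()
        .join("; ")
}
```

Note that the regenerated text is normalized by the formatter, so it may differ in whitespace and keyword casing from the original input.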
#[allow(clippy::cognitive_complexity)] @@ -4414,8 +3331,7 @@ impl fmt::Display for Statement { } => { write!(f, "FLUSH")?; if let Some(location) = location { - f.write_str(" ")?; - location.fmt(f)?; + write!(f, " {location}")?; } write!(f, " {object_type}")?; @@ -4453,7 +3369,7 @@ impl fmt::Display for Statement { write!(f, "{describe_alias} ")?; if let Some(format) = hive_format { - write!(f, "{format} ")?; + write!(f, "{} ", format)?; } if *has_table_keyword { write!(f, "TABLE ")?; @@ -4466,7 +3382,6 @@ impl fmt::Display for Statement { verbose, analyze, query_plan, - estimate, statement, format, options, @@ -4479,9 +3394,6 @@ impl fmt::Display for Statement { if *analyze { write!(f, "ANALYZE ")?; } - if *estimate { - write!(f, "ESTIMATE ")?; - } if *verbose { write!(f, "VERBOSE ")?; @@ -4497,7 +3409,7 @@ impl fmt::Display for Statement { write!(f, "{statement}") } - Statement::Query(s) => s.fmt(f), + Statement::Query(s) => write!(f, "{s}"), Statement::Declare { stmts } => { write!(f, "DECLARE ")?; write!(f, "{}", display_separated(stmts, "; ")) @@ -4505,10 +3417,11 @@ impl fmt::Display for Statement { Statement::Fetch { name, direction, - position, into, } => { - write!(f, "FETCH {direction} {position} {name}")?; + write!(f, "FETCH {direction} ")?; + + write!(f, "IN {name}")?; if let Some(into) = into { write!(f, " INTO {into}")?; @@ -4555,15 +3468,17 @@ impl fmt::Display for Statement { table_names, partitions, table, + only, identity, cascade, on_cluster, } => { let table = if *table { "TABLE " } else { "" }; + let only = if *only { "ONLY " } else { "" }; write!( f, - "TRUNCATE {table}{table_names}", + "TRUNCATE {table}{only}{table_names}", table_names = display_comma_separated(table_names) )?; @@ -4575,8 +3490,8 @@ impl fmt::Display for Statement { } if let Some(cascade) = cascade { match cascade { - CascadeOption::Cascade => write!(f, " CASCADE")?, - CascadeOption::Restrict => write!(f, " RESTRICT")?, + TruncateCascadeOption::Cascade => write!(f, " CASCADE")?, + TruncateCascadeOption::Restrict => write!(f, " RESTRICT")?, } } @@ -4590,18 +3505,6 @@ impl fmt::Display for Statement { } Ok(()) } - Statement::Case(stmt) => { - write!(f, "{stmt}") - } - Statement::If(stmt) => { - write!(f, "{stmt}") - } - Statement::While(stmt) => { - write!(f, "{stmt}") - } - Statement::Raise(stmt) => { - write!(f, "{stmt}") - } Statement::AttachDatabase { schema_name, database_file_name, @@ -4652,13 +3555,8 @@ impl fmt::Display for Statement { cache_metadata, noscan, compute_statistics, - has_table_keyword, } => { - write!( - f, - "ANALYZE{}{table_name}", - if *has_table_keyword { " TABLE " } else { " " } - )?; + write!(f, "ANALYZE TABLE {table_name}")?; if let Some(ref parts) = partitions { if !parts.is_empty() { write!(f, " PARTITION ({})", display_comma_separated(parts))?; @@ -4682,7 +3580,7 @@ impl fmt::Display for Statement { } Ok(()) } - Statement::Insert(insert) => insert.fmt(f), + Statement::Insert(insert) => write!(f, "{insert}"), Statement::Install { extension_name: name, } => write!(f, "INSTALL {name}"), @@ -4743,44 +3641,23 @@ impl fmt::Display for Statement { from, selection, returning, - or, } => { - f.write_str("UPDATE ")?; - if let Some(or) = or { - or.fmt(f)?; - f.write_str(" ")?; - } - table.fmt(f)?; - if let Some(UpdateTableFromKind::BeforeSet(from)) = from { - SpaceOrNewline.fmt(f)?; - f.write_str("FROM")?; - indented_list(f, from)?; - } + write!(f, "UPDATE {table}")?; if !assignments.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("SET")?; - indented_list(f, assignments)?; + 
write!(f, " SET {}", display_comma_separated(assignments))?; } - if let Some(UpdateTableFromKind::AfterSet(from)) = from { - SpaceOrNewline.fmt(f)?; - f.write_str("FROM")?; - indented_list(f, from)?; + if let Some(from) = from { + write!(f, " FROM {from}")?; } if let Some(selection) = selection { - SpaceOrNewline.fmt(f)?; - f.write_str("WHERE")?; - SpaceOrNewline.fmt(f)?; - Indent(selection).fmt(f)?; + write!(f, " WHERE {selection}")?; } if let Some(returning) = returning { - SpaceOrNewline.fmt(f)?; - f.write_str("RETURNING")?; - indented_list(f, returning)?; + write!(f, " RETURNING {}", display_comma_separated(returning))?; } Ok(()) } - Statement::Delete(delete) => delete.fmt(f), - Statement::Open(open) => open.fmt(f), + Statement::Delete(delete) => write!(f, "{delete}"), Statement::Close { cursor } => { write!(f, "CLOSE {cursor}")?; @@ -4805,10 +3682,76 @@ impl fmt::Display for Statement { } Ok(()) } - Statement::CreateFunction(create_function) => create_function.fmt(f), - Statement::CreateDomain(create_domain) => create_domain.fmt(f), + Statement::CreateFunction { + or_replace, + temporary, + if_not_exists, + name, + args, + return_type, + function_body, + language, + behavior, + called_on_null, + parallel, + using, + determinism_specifier, + options, + remote_connection, + } => { + write!( + f, + "CREATE {or_replace}{temp}FUNCTION {if_not_exists}{name}", + temp = if *temporary { "TEMPORARY " } else { "" }, + or_replace = if *or_replace { "OR REPLACE " } else { "" }, + if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, + )?; + if let Some(args) = args { + write!(f, "({})", display_comma_separated(args))?; + } + if let Some(return_type) = return_type { + write!(f, " RETURNS {return_type}")?; + } + if let Some(determinism_specifier) = determinism_specifier { + write!(f, " {determinism_specifier}")?; + } + if let Some(language) = language { + write!(f, " LANGUAGE {language}")?; + } + if let Some(behavior) = behavior { + write!(f, " {behavior}")?; + } + if let Some(called_on_null) = called_on_null { + write!(f, " {called_on_null}")?; + } + if let Some(parallel) = parallel { + write!(f, " {parallel}")?; + } + if let Some(remote_connection) = remote_connection { + write!(f, " REMOTE WITH CONNECTION {remote_connection}")?; + } + if let Some(CreateFunctionBody::AsBeforeOptions(function_body)) = function_body { + write!(f, " AS {function_body}")?; + } + if let Some(CreateFunctionBody::Return(function_body)) = function_body { + write!(f, " RETURN {function_body}")?; + } + if let Some(using) = using { + write!(f, " {using}")?; + } + if let Some(options) = options { + write!( + f, + " OPTIONS({})", + display_comma_separated(options.as_slice()) + )?; + } + if let Some(CreateFunctionBody::AsAfterOptions(function_body)) = function_body { + write!(f, " AS {function_body}")?; + } + Ok(()) + } Statement::CreateTrigger { - or_alter, or_replace, is_constraint, name, @@ -4821,30 +3764,19 @@ impl fmt::Display for Statement { condition, include_each, exec_body, - statements, characteristics, } => { write!( f, - "CREATE {or_alter}{or_replace}{is_constraint}TRIGGER {name} ", - or_alter = if *or_alter { "OR ALTER " } else { "" }, + "CREATE {or_replace}{is_constraint}TRIGGER {name} {period}", or_replace = if *or_replace { "OR REPLACE " } else { "" }, is_constraint = if *is_constraint { "CONSTRAINT " } else { "" }, )?; - if exec_body.is_some() { - write!(f, "{period}")?; - if !events.is_empty() { - write!(f, " {}", display_separated(events, " OR "))?; - } - write!(f, " ON {table_name}")?; - } else 
{ - write!(f, "ON {table_name}")?; - write!(f, " {period}")?; - if !events.is_empty() { - write!(f, " {}", display_separated(events, ", "))?; - } + if !events.is_empty() { + write!(f, " {}", display_separated(events, " OR "))?; } + write!(f, " ON {table_name}")?; if let Some(referenced_table_name) = referenced_table_name { write!(f, " FROM {referenced_table_name}")?; @@ -4860,19 +3792,13 @@ impl fmt::Display for Statement { if *include_each { write!(f, " FOR EACH {trigger_object}")?; - } else if exec_body.is_some() { + } else { write!(f, " FOR {trigger_object}")?; } if let Some(condition) = condition { write!(f, " WHEN {condition}")?; } - if let Some(exec_body) = exec_body { - write!(f, " EXECUTE {exec_body}")?; - } - if let Some(statements) = statements { - write!(f, " AS {statements}")?; - } - Ok(()) + write!(f, " EXECUTE {exec_body}") } Statement::DropTrigger { if_exists, @@ -4884,10 +3810,7 @@ impl fmt::Display for Statement { if *if_exists { write!(f, " IF EXISTS")?; } - match &table_name { - Some(table_name) => write!(f, " {trigger_name} ON {table_name}")?, - None => write!(f, " {trigger_name}")?, - }; + write!(f, " {trigger_name} ON {table_name}")?; if let Some(option) = option { write!(f, " {option}")?; } @@ -4897,7 +3820,6 @@ impl fmt::Display for Statement { name, or_alter, params, - language, body, } => { write!( @@ -4912,12 +3834,11 @@ impl fmt::Display for Statement { write!(f, " ({})", display_comma_separated(p))?; } } - - if let Some(language) = language { - write!(f, " LANGUAGE {language}")?; - } - - write!(f, " AS {body}") + write!( + f, + " AS BEGIN {body} END", + body = display_separated(body, "; ") + ) } Statement::CreateMacro { or_replace, @@ -4942,7 +3863,6 @@ impl fmt::Display for Statement { Ok(()) } Statement::CreateView { - or_alter, name, or_replace, columns, @@ -4955,20 +3875,11 @@ impl fmt::Display for Statement { if_not_exists, temporary, to, - params, } => { write!( f, - "CREATE {or_alter}{or_replace}", - or_alter = if *or_alter { "OR ALTER " } else { "" }, + "CREATE {or_replace}{materialized}{temporary}VIEW {if_not_exists}{name}{to}", or_replace = if *or_replace { "OR REPLACE " } else { "" }, - )?; - if let Some(params) = params { - params.fmt(f)?; - } - write!( - f, - "{materialized}{temporary}VIEW {if_not_exists}{name}{to}", materialized = if *materialized { "MATERIALIZED " } else { "" }, name = name, temporary = if *temporary { "TEMPORARY " } else { "" }, @@ -4997,45 +3908,13 @@ impl fmt::Display for Statement { if matches!(options, CreateTableOptions::Options(_)) { write!(f, " {options}")?; } - f.write_str(" AS")?; - SpaceOrNewline.fmt(f)?; - query.fmt(f)?; + write!(f, " AS {query}")?; if *with_no_schema_binding { write!(f, " WITH NO SCHEMA BINDING")?; } Ok(()) } Statement::CreateTable(create_table) => create_table.fmt(f), - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - write!( - f, - "LOAD DATA {local}INPATH '{inpath}' {overwrite}INTO TABLE {table_name}", - local = if *local { "LOCAL " } else { "" }, - inpath = inpath, - overwrite = if *overwrite { "OVERWRITE " } else { "" }, - table_name = table_name, - )?; - if let Some(ref parts) = &partitioned { - if !parts.is_empty() { - write!(f, " PARTITION ({})", display_comma_separated(parts))?; - } - } - if let Some(HiveLoadDataFormat { - serde, - input_format, - }) = &table_format - { - write!(f, " INPUTFORMAT {input_format} SERDE {serde}")?; - } - Ok(()) - } Statement::CreateVirtualTable { name, if_not_exists, @@ -5083,21 +3962,6 @@ impl 
fmt::Display for Statement { Ok(()) } - Statement::DropExtension { - names, - if_exists, - cascade_or_restrict, - } => { - write!(f, "DROP EXTENSION")?; - if *if_exists { - write!(f, " IF EXISTS")?; - } - write!(f, " {}", display_comma_separated(names))?; - if let Some(cascade_or_restrict) = cascade_or_restrict { - write!(f, " {cascade_or_restrict}")?; - } - Ok(()) - } Statement::CreateRole { names, if_not_exists, @@ -5225,9 +4089,6 @@ impl fmt::Display for Statement { write!(f, " )")?; Ok(()) } - Statement::CreateServer(stmt) => { - write!(f, "{stmt}") - } Statement::CreatePolicy { name, table_name, @@ -5270,7 +4131,6 @@ impl fmt::Display for Statement { Ok(()) } - Statement::CreateConnector(create_connector) => create_connector.fmt(f), Statement::AlterTable { name, if_exists, @@ -5278,21 +4138,15 @@ impl fmt::Display for Statement { operations, location, on_cluster, - iceberg, } => { - if *iceberg { - write!(f, "ALTER ICEBERG TABLE ")?; - } else { - write!(f, "ALTER TABLE ")?; - } - + write!(f, "ALTER TABLE ")?; if *if_exists { write!(f, "IF EXISTS ")?; } if *only { write!(f, "ONLY ")?; } - write!(f, "{name} ")?; + write!(f, "{name} ", name = name)?; if let Some(cluster) = on_cluster { write!(f, "ON CLUSTER {cluster} ")?; } @@ -5324,9 +4178,6 @@ impl fmt::Display for Statement { } write!(f, " AS {query}") } - Statement::AlterType(AlterType { name, operation }) => { - write!(f, "ALTER TYPE {name} {operation}") - } Statement::AlterRole { name, operation } => { write!(f, "ALTER ROLE {name} {operation}") } @@ -5337,51 +4188,6 @@ impl fmt::Display for Statement { } => { write!(f, "ALTER POLICY {name} ON {table_name}{operation}") } - Statement::AlterConnector { - name, - properties, - url, - owner, - } => { - write!(f, "ALTER CONNECTOR {name}")?; - if let Some(properties) = properties { - write!( - f, - " SET DCPROPERTIES({})", - display_comma_separated(properties) - )?; - } - if let Some(url) = url { - write!(f, " SET URL '{url}'")?; - } - if let Some(owner) = owner { - write!(f, " SET OWNER {owner}")?; - } - Ok(()) - } - Statement::AlterSession { - set, - session_params, - } => { - write!( - f, - "ALTER SESSION {set}", - set = if *set { "SET" } else { "UNSET" } - )?; - if !session_params.options.is_empty() { - if *set { - write!(f, " {session_params}")?; - } else { - let options = session_params - .options - .iter() - .map(|p| p.option_name.clone()) - .collect::>(); - write!(f, " {}", display_separated(&options, ", "))?; - } - } - Ok(()) - } Statement::Drop { object_type, if_exists, @@ -5390,28 +4196,21 @@ impl fmt::Display for Statement { restrict, purge, temporary, - table, - } => { - write!( - f, - "DROP {}{}{} {}{}{}{}", - if *temporary { "TEMPORARY " } else { "" }, - object_type, - if *if_exists { " IF EXISTS" } else { "" }, - display_comma_separated(names), - if *cascade { " CASCADE" } else { "" }, - if *restrict { " RESTRICT" } else { "" }, - if *purge { " PURGE" } else { "" }, - )?; - if let Some(table_name) = table.as_ref() { - write!(f, " ON {table_name}")?; - }; - Ok(()) - } + } => write!( + f, + "DROP {}{}{} {}{}{}{}", + if *temporary { "TEMPORARY " } else { "" }, + object_type, + if *if_exists { " IF EXISTS" } else { "" }, + display_comma_separated(names), + if *cascade { " CASCADE" } else { "" }, + if *restrict { " RESTRICT" } else { "" }, + if *purge { " PURGE" } else { "" } + ), Statement::DropFunction { if_exists, func_desc, - drop_behavior, + option, } => { write!( f, @@ -5419,22 +4218,7 @@ impl fmt::Display for Statement { if *if_exists { " IF EXISTS" } else { "" }, 
display_comma_separated(func_desc), )?; - if let Some(op) = drop_behavior { - write!(f, " {op}")?; - } - Ok(()) - } - Statement::DropDomain(DropDomain { - if_exists, - name, - drop_behavior, - }) => { - write!( - f, - "DROP DOMAIN{} {name}", - if *if_exists { " IF EXISTS" } else { "" }, - )?; - if let Some(op) = drop_behavior { + if let Some(op) = option { write!(f, " {op}")?; } Ok(()) @@ -5442,7 +4226,7 @@ impl fmt::Display for Statement { Statement::DropProcedure { if_exists, proc_desc, - drop_behavior, + option, } => { write!( f, @@ -5450,7 +4234,7 @@ impl fmt::Display for Statement { if *if_exists { " IF EXISTS" } else { "" }, display_comma_separated(proc_desc), )?; - if let Some(op) = drop_behavior { + if let Some(op) = option { write!(f, " {op}")?; } Ok(()) @@ -5479,31 +4263,76 @@ impl fmt::Display for Statement { if_exists, name, table_name, - drop_behavior, + option, } => { write!(f, "DROP POLICY")?; if *if_exists { write!(f, " IF EXISTS")?; } write!(f, " {name} ON {table_name}")?; - if let Some(drop_behavior) = drop_behavior { - write!(f, " {drop_behavior}")?; + if let Some(option) = option { + write!(f, " {option}")?; } Ok(()) } - Statement::DropConnector { if_exists, name } => { - write!( - f, - "DROP CONNECTOR {if_exists}{name}", - if_exists = if *if_exists { "IF EXISTS " } else { "" } - )?; - Ok(()) - } Statement::Discard { object_type } => { write!(f, "DISCARD {object_type}")?; Ok(()) } - Self::Set(set) => write!(f, "{set}"), + Self::SetRole { + context_modifier, + role_name, + } => { + let role_name = role_name.clone().unwrap_or_else(|| Ident::new("NONE")); + write!(f, "SET{context_modifier} ROLE {role_name}") + } + Statement::SetVariable { + local, + variables, + hivevar, + value, + } => { + f.write_str("SET ")?; + if *local { + f.write_str("LOCAL ")?; + } + let parenthesized = matches!(variables, OneOrManyWithParens::Many(_)); + write!( + f, + "{hivevar}{name} = {l_paren}{value}{r_paren}", + hivevar = if *hivevar { "HIVEVAR:" } else { "" }, + name = variables, + l_paren = parenthesized.then_some("(").unwrap_or_default(), + value = display_comma_separated(value), + r_paren = parenthesized.then_some(")").unwrap_or_default(), + ) + } + Statement::SetTimeZone { local, value } => { + f.write_str("SET ")?; + if *local { + f.write_str("LOCAL ")?; + } + write!(f, "TIME ZONE {value}") + } + Statement::SetNames { + charset_name, + collation_name, + } => { + f.write_str("SET NAMES ")?; + f.write_str(charset_name)?; + + if let Some(collation) = collation_name { + f.write_str(" COLLATE ")?; + f.write_str(collation)?; + }; + + Ok(()) + } + Statement::SetNamesDefault {} => { + f.write_str("SET NAMES DEFAULT")?; + + Ok(()) + } Statement::ShowVariable { variable } => { write!(f, "SHOW")?; if !variable.is_empty() { @@ -5554,83 +4383,79 @@ impl fmt::Display for Statement { Statement::ShowColumns { extended, full, - show_options, + table_name, + filter, } => { write!( f, - "SHOW {extended}{full}COLUMNS{show_options}", + "SHOW {extended}{full}COLUMNS FROM {table_name}", extended = if *extended { "EXTENDED " } else { "" }, full = if *full { "FULL " } else { "" }, + table_name = table_name, )?; + if let Some(filter) = filter { + write!(f, " {filter}")?; + } Ok(()) } - Statement::ShowDatabases { - terse, - history, - show_options, - } => { - write!( - f, - "SHOW {terse}DATABASES{history}{show_options}", - terse = if *terse { "TERSE " } else { "" }, - history = if *history { " HISTORY" } else { "" }, - )?; + Statement::ShowDatabases { filter } => { + write!(f, "SHOW DATABASES")?; + if let 
Some(filter) = filter { + write!(f, " {filter}")?; + } Ok(()) } - Statement::ShowSchemas { - terse, - history, - show_options, - } => { - write!( - f, - "SHOW {terse}SCHEMAS{history}{show_options}", - terse = if *terse { "TERSE " } else { "" }, - history = if *history { " HISTORY" } else { "" }, - )?; - Ok(()) - } - Statement::ShowObjects(ShowObjects { - terse, - show_options, - }) => { - write!( - f, - "SHOW {terse}OBJECTS{show_options}", - terse = if *terse { "TERSE " } else { "" }, - )?; + Statement::ShowSchemas { filter } => { + write!(f, "SHOW SCHEMAS")?; + if let Some(filter) = filter { + write!(f, " {filter}")?; + } Ok(()) } Statement::ShowTables { - terse, - history, extended, full, - external, - show_options, + clause: show_clause, + db_name, + filter, } => { write!( f, - "SHOW {terse}{extended}{full}{external}TABLES{history}{show_options}", - terse = if *terse { "TERSE " } else { "" }, + "SHOW {extended}{full}TABLES", extended = if *extended { "EXTENDED " } else { "" }, full = if *full { "FULL " } else { "" }, - external = if *external { "EXTERNAL " } else { "" }, - history = if *history { " HISTORY" } else { "" }, )?; + if let Some(show_clause) = show_clause { + write!(f, " {show_clause}")?; + } + if let Some(db_name) = db_name { + write!(f, " {db_name}")?; + } + if let Some(filter) = filter { + write!(f, " {filter}")?; + } Ok(()) } Statement::ShowViews { - terse, materialized, - show_options, + clause: show_clause, + db_name, + filter, } => { write!( f, - "SHOW {terse}{materialized}VIEWS{show_options}", - terse = if *terse { "TERSE " } else { "" }, - materialized = if *materialized { "MATERIALIZED " } else { "" } + "SHOW {}VIEWS", + if *materialized { "MATERIALIZED " } else { "" } )?; + if let Some(show_clause) = show_clause { + write!(f, " {show_clause}")?; + } + if let Some(db_name) = db_name { + write!(f, " {db_name}")?; + } + if let Some(filter) = filter { + write!(f, " {filter}")?; + } Ok(()) } Statement::ShowFunctions { filter } => { @@ -5651,59 +4476,42 @@ impl fmt::Display for Statement { Statement::StartTransaction { modes, begin: syntax_begin, - transaction, modifier, - statements, - exception, - has_end_keyword, } => { if *syntax_begin { if let Some(modifier) = *modifier { - write!(f, "BEGIN {modifier}")?; + write!(f, "BEGIN {} TRANSACTION", modifier)?; } else { - write!(f, "BEGIN")?; + write!(f, "BEGIN TRANSACTION")?; } } else { - write!(f, "START")?; - } - if let Some(transaction) = transaction { - write!(f, " {transaction}")?; + write!(f, "START TRANSACTION")?; } if !modes.is_empty() { write!(f, " {}", display_comma_separated(modes))?; } - if !statements.is_empty() { - write!(f, " ")?; - format_statement_list(f, statements)?; + Ok(()) + } + Statement::SetTransaction { + modes, + snapshot, + session, + } => { + if *session { + write!(f, "SET SESSION CHARACTERISTICS AS TRANSACTION")?; + } else { + write!(f, "SET TRANSACTION")?; } - if let Some(exception_when) = exception { - write!(f, " EXCEPTION")?; - for when in exception_when { - write!(f, " {when}")?; - } + if !modes.is_empty() { + write!(f, " {}", display_comma_separated(modes))?; } - if *has_end_keyword { - write!(f, " END")?; + if let Some(snapshot_id) = snapshot { + write!(f, " SNAPSHOT {snapshot_id}")?; } Ok(()) } - Statement::Commit { - chain, - end: end_syntax, - modifier, - } => { - if *end_syntax { - write!(f, "END")?; - if let Some(modifier) = *modifier { - write!(f, " {modifier}")?; - } - if *chain { - write!(f, " AND CHAIN")?; - } - } else { - write!(f, "COMMIT{}", if *chain { " AND CHAIN" } else { 
"" })?; - } - Ok(()) + Statement::Commit { chain } => { + write!(f, "COMMIT{}", if *chain { " AND CHAIN" } else { "" },) } Statement::Rollback { chain, savepoint } => { write!(f, "ROLLBACK")?; @@ -5721,31 +4529,12 @@ impl fmt::Display for Statement { Statement::CreateSchema { schema_name, if_not_exists, - with, - options, - default_collate_spec, - } => { - write!( - f, - "CREATE SCHEMA {if_not_exists}{name}", - if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, - name = schema_name - )?; - - if let Some(collate) = default_collate_spec { - write!(f, " DEFAULT COLLATE {collate}")?; - } - - if let Some(with) = with { - write!(f, " WITH ({})", display_comma_separated(with))?; - } - - if let Some(options) = options { - write!(f, " OPTIONS({})", display_comma_separated(options))?; - } - - Ok(()) - } + } => write!( + f, + "CREATE SCHEMA {if_not_exists}{name}", + if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, + name = schema_name + ), Statement::Assert { condition, message } => { write!(f, "ASSERT {condition}")?; if let Some(m) = message { @@ -5758,30 +4547,19 @@ impl fmt::Display for Statement { objects, grantees, with_grant_option, - as_grantor, granted_by, - current_grants, } => { write!(f, "GRANT {privileges} ")?; - if let Some(objects) = objects { - write!(f, "ON {objects} ")?; - } + write!(f, "ON {objects} ")?; write!(f, "TO {}", display_comma_separated(grantees))?; if *with_grant_option { write!(f, " WITH GRANT OPTION")?; } - if let Some(current_grants) = current_grants { - write!(f, " {current_grants}")?; - } - if let Some(grantor) = as_grantor { - write!(f, " AS {grantor}")?; - } if let Some(grantor) = granted_by { write!(f, " GRANTED BY {grantor}")?; } Ok(()) } - Statement::Deny(s) => write!(f, "{s}"), Statement::Revoke { privileges, objects, @@ -5790,16 +4568,12 @@ impl fmt::Display for Statement { cascade, } => { write!(f, "REVOKE {privileges} ")?; - if let Some(objects) = objects { - write!(f, "ON {objects} ")?; - } + write!(f, "ON {objects} ")?; write!(f, "FROM {}", display_comma_separated(grantees))?; if let Some(grantor) = granted_by { write!(f, " GRANTED BY {grantor}")?; } - if let Some(cascade) = cascade { - write!(f, " {cascade}")?; - } + write!(f, " {}", if *cascade { "CASCADE" } else { "RESTRICT" })?; Ok(()) } Statement::Deallocate { name, prepare } => write!( @@ -5811,26 +4585,11 @@ impl fmt::Display for Statement { Statement::Execute { name, parameters, - has_parentheses, - immediate, - into, using, } => { - let (open, close) = if *has_parentheses { - ("(", ")") - } else { - (if parameters.is_empty() { "" } else { " " }, "") - }; - write!(f, "EXECUTE")?; - if *immediate { - write!(f, " IMMEDIATE")?; - } - if let Some(name) = name { - write!(f, " {name}")?; - } - write!(f, "{open}{}{close}", display_comma_separated(parameters),)?; - if !into.is_empty() { - write!(f, " INTO {}", display_comma_separated(into))?; + write!(f, "EXECUTE {name}")?; + if !parameters.is_empty() { + write!(f, "({})", display_comma_separated(parameters))?; } if !using.is_empty() { write!(f, " USING {}", display_comma_separated(using))?; @@ -5878,7 +4637,6 @@ impl fmt::Display for Statement { source, on, clauses, - output, } => { write!( f, @@ -5886,11 +4644,7 @@ impl fmt::Display for Statement { int = if *into { " INTO" } else { "" } )?; write!(f, "ON {on} ")?; - write!(f, "{}", display_separated(clauses, " "))?; - if let Some(output) = output { - write!(f, " {output}")?; - } - Ok(()) + write!(f, "{}", display_separated(clauses, " ")) } Statement::Cache { 
table_name, @@ -5977,13 +4731,13 @@ impl fmt::Display for Statement { if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, )?; if !directory_table_params.options.is_empty() { - write!(f, " DIRECTORY=({directory_table_params})")?; + write!(f, " DIRECTORY=({})", directory_table_params)?; } if !file_format.options.is_empty() { - write!(f, " FILE_FORMAT=({file_format})")?; + write!(f, " FILE_FORMAT=({})", file_format)?; } if !copy_options.options.is_empty() { - write!(f, " COPY_OPTIONS=({copy_options})")?; + write!(f, " COPY_OPTIONS=({})", copy_options)?; } if comment.is_some() { write!(f, " COMMENT='{}'", comment.as_ref().unwrap())?; @@ -5991,73 +4745,60 @@ impl fmt::Display for Statement { Ok(()) } Statement::CopyIntoSnowflake { - kind, into, - into_columns, - from_obj, - from_obj_alias, + from_stage, + from_stage_alias, stage_params, from_transformations, - from_query, files, pattern, file_format, copy_options, validation_mode, - partition, } => { - write!(f, "COPY INTO {into}")?; - if let Some(into_columns) = into_columns { - write!(f, " ({})", display_comma_separated(into_columns))?; - } - if let Some(from_transformations) = from_transformations { - // Data load with transformation - if let Some(from_stage) = from_obj { - write!( - f, - " FROM (SELECT {} FROM {}{}", - display_separated(from_transformations, ", "), - from_stage, - stage_params - )?; + write!(f, "COPY INTO {}", into)?; + if from_transformations.is_none() { + // Standard data load + write!(f, " FROM {}{}", from_stage, stage_params)?; + if from_stage_alias.as_ref().is_some() { + write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?; } - if let Some(from_obj_alias) = from_obj_alias { - write!(f, " AS {from_obj_alias}")?; + } else { + // Data load with transformation + write!( + f, + " FROM (SELECT {} FROM {}{}", + display_separated(from_transformations.as_ref().unwrap(), ", "), + from_stage, + stage_params, + )?; + if from_stage_alias.as_ref().is_some() { + write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?; } write!(f, ")")?; - } else if let Some(from_obj) = from_obj { - // Standard data load - write!(f, " FROM {from_obj}{stage_params}")?; - if let Some(from_obj_alias) = from_obj_alias { - write!(f, " AS {from_obj_alias}")?; - } - } else if let Some(from_query) = from_query { - // Data unload from query - write!(f, " FROM ({from_query})")?; } - - if let Some(files) = files { - write!(f, " FILES = ('{}')", display_separated(files, "', '"))?; + if files.is_some() { + write!( + f, + " FILES = ('{}')", + display_separated(files.as_ref().unwrap(), "', '") + )?; } - if let Some(pattern) = pattern { - write!(f, " PATTERN = '{pattern}'")?; - } - if let Some(partition) = partition { - write!(f, " PARTITION BY {partition}")?; + if pattern.is_some() { + write!(f, " PATTERN = '{}'", pattern.as_ref().unwrap())?; } if !file_format.options.is_empty() { - write!(f, " FILE_FORMAT=({file_format})")?; + write!(f, " FILE_FORMAT=({})", file_format)?; } if !copy_options.options.is_empty() { - match kind { - CopyIntoSnowflakeKind::Table => { - write!(f, " COPY_OPTIONS=({copy_options})")? 
- } - CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?, - } + write!(f, " COPY_OPTIONS=({})", copy_options)?; } - if let Some(validation_mode) = validation_mode { - write!(f, " VALIDATION_MODE = {validation_mode}")?; + if validation_mode.is_some() { + write!( + f, + " VALIDATION_MODE = {}", + validation_mode.as_ref().unwrap() + )?; } Ok(()) } @@ -6103,10 +4844,10 @@ impl fmt::Display for Statement { } => { write!(f, "OPTIMIZE TABLE {name}")?; if let Some(on_cluster) = on_cluster { - write!(f, " ON CLUSTER {on_cluster}")?; + write!(f, " ON CLUSTER {on_cluster}", on_cluster = on_cluster)?; } if let Some(partition) = partition { - write!(f, " {partition}")?; + write!(f, " {partition}", partition = partition)?; } if *include_final { write!(f, " FINAL")?; @@ -6120,10 +4861,6 @@ impl fmt::Display for Statement { write!(f, "LISTEN {channel}")?; Ok(()) } - Statement::UNLISTEN { channel } => { - write!(f, "UNLISTEN {channel}")?; - Ok(()) - } Statement::NOTIFY { channel, payload } => { write!(f, "NOTIFY {channel}")?; if let Some(payload) = payload { @@ -6131,30 +4868,6 @@ impl fmt::Display for Statement { } Ok(()) } - Statement::RenameTable(rename_tables) => { - write!(f, "RENAME TABLE {}", display_comma_separated(rename_tables)) - } - Statement::RaisError { - message, - severity, - state, - arguments, - options, - } => { - write!(f, "RAISERROR({message}, {severity}, {state}")?; - if !arguments.is_empty() { - write!(f, ", {}", display_comma_separated(arguments))?; - } - write!(f, ")")?; - if !options.is_empty() { - write!(f, " WITH {}", display_comma_separated(options))?; - } - Ok(()) - } - Statement::Print(s) => write!(f, "{s}"), - Statement::Return(r) => write!(f, "{r}"), - Statement::List(command) => write!(f, "LIST {command}"), - Statement::Remove(command) => write!(f, "REMOVE {command}"), } } } @@ -6218,28 +4931,6 @@ impl fmt::Display for SequenceOptions { } } -/// Assignment for a `SET` statement (name [=|TO] value) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct SetAssignment { - pub scope: Option, - pub name: ObjectName, - pub value: Expr, -} - -impl fmt::Display for SetAssignment { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "{}{} = {}", - self.scope.map(|s| format!("{s}")).unwrap_or_default(), - self.name, - self.value - ) - } -} - /// Target of a `TRUNCATE TABLE` command /// /// Note this is its own struct because `visit_relation` requires an `ObjectName` (not a `Vec`) @@ -6250,17 +4941,10 @@ pub struct TruncateTableTarget { /// name of the table being truncated #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] pub name: ObjectName, - /// Postgres-specific option - /// [ TRUNCATE TABLE ONLY ] - /// - pub only: bool, } impl fmt::Display for TruncateTableTarget { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if self.only { - write!(f, "ONLY ")?; - }; write!(f, "{}", self.name) } } @@ -6275,43 +4959,16 @@ pub enum TruncateIdentityOption { Continue, } -/// Cascade/restrict option for Postgres TRUNCATE table, MySQL GRANT/REVOKE, etc. 
+/// PostgreSQL cascade option for TRUNCATE table /// [ CASCADE | RESTRICT ] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum CascadeOption { +pub enum TruncateCascadeOption { Cascade, Restrict, } -impl Display for CascadeOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CascadeOption::Cascade => write!(f, "CASCADE"), - CascadeOption::Restrict => write!(f, "RESTRICT"), - } - } -} - -/// Transaction started with [ TRANSACTION | WORK ] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum BeginTransactionKind { - Transaction, - Work, -} - -impl Display for BeginTransactionKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - BeginTransactionKind::Transaction => write!(f, "TRANSACTION"), - BeginTransactionKind::Work => write!(f, "WORK"), - } - } -} - /// Can use to describe options in create sequence or table column type identity /// [ MINVALUE minvalue | NO MINVALUE ] [ MAXVALUE maxvalue | NO MAXVALUE ] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -6529,161 +5186,34 @@ impl fmt::Display for FetchDirection { } } -/// The "position" for a FETCH statement. -/// -/// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/fetch-transact-sql) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum FetchPosition { - From, - In, -} - -impl fmt::Display for FetchPosition { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - FetchPosition::From => f.write_str("FROM")?, - FetchPosition::In => f.write_str("IN")?, - }; - - Ok(()) - } -} - /// A privilege on a database object (table, sequence, etc.). 
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum Action { - AddSearchOptimization, - Apply { - apply_type: ActionApplyType, - }, - ApplyBudget, - AttachListing, - AttachPolicy, - Audit, - BindServiceEndpoint, Connect, - Create { - obj_type: Option, - }, - DatabaseRole { - role: ObjectName, - }, + Create, Delete, - EvolveSchema, - Exec { - obj_type: Option, - }, - Execute { - obj_type: Option, - }, - Failover, - ImportedPrivileges, - ImportShare, - Insert { - columns: Option>, - }, - Manage { - manage_type: ActionManageType, - }, - ManageReleases, - ManageVersions, - Modify { - modify_type: Option, - }, - Monitor { - monitor_type: Option, - }, - Operate, - OverrideShareRestrictions, - Ownership, - PurchaseDataExchangeListing, - Read, - ReadSession, - References { - columns: Option>, - }, - Replicate, - ResolveAll, - Role { - role: Ident, - }, - Select { - columns: Option>, - }, + Execute, + Insert { columns: Option> }, + References { columns: Option> }, + Select { columns: Option> }, Temporary, Trigger, Truncate, - Update { - columns: Option>, - }, + Update { columns: Option> }, Usage, } impl fmt::Display for Action { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Action::AddSearchOptimization => f.write_str("ADD SEARCH OPTIMIZATION")?, - Action::Apply { apply_type } => write!(f, "APPLY {apply_type}")?, - Action::ApplyBudget => f.write_str("APPLYBUDGET")?, - Action::AttachListing => f.write_str("ATTACH LISTING")?, - Action::AttachPolicy => f.write_str("ATTACH POLICY")?, - Action::Audit => f.write_str("AUDIT")?, - Action::BindServiceEndpoint => f.write_str("BIND SERVICE ENDPOINT")?, Action::Connect => f.write_str("CONNECT")?, - Action::Create { obj_type } => { - f.write_str("CREATE")?; - if let Some(obj_type) = obj_type { - write!(f, " {obj_type}")? - } - } - Action::DatabaseRole { role } => write!(f, "DATABASE ROLE {role}")?, + Action::Create => f.write_str("CREATE")?, Action::Delete => f.write_str("DELETE")?, - Action::EvolveSchema => f.write_str("EVOLVE SCHEMA")?, - Action::Exec { obj_type } => { - f.write_str("EXEC")?; - if let Some(obj_type) = obj_type { - write!(f, " {obj_type}")? - } - } - Action::Execute { obj_type } => { - f.write_str("EXECUTE")?; - if let Some(obj_type) = obj_type { - write!(f, " {obj_type}")? - } - } - Action::Failover => f.write_str("FAILOVER")?, - Action::ImportedPrivileges => f.write_str("IMPORTED PRIVILEGES")?, - Action::ImportShare => f.write_str("IMPORT SHARE")?, + Action::Execute => f.write_str("EXECUTE")?, Action::Insert { .. } => f.write_str("INSERT")?, - Action::Manage { manage_type } => write!(f, "MANAGE {manage_type}")?, - Action::ManageReleases => f.write_str("MANAGE RELEASES")?, - Action::ManageVersions => f.write_str("MANAGE VERSIONS")?, - Action::Modify { modify_type } => { - write!(f, "MODIFY")?; - if let Some(modify_type) = modify_type { - write!(f, " {modify_type}")?; - } - } - Action::Monitor { monitor_type } => { - write!(f, "MONITOR")?; - if let Some(monitor_type) = monitor_type { - write!(f, " {monitor_type}")? 
- } - } - Action::Operate => f.write_str("OPERATE")?, - Action::OverrideShareRestrictions => f.write_str("OVERRIDE SHARE RESTRICTIONS")?, - Action::Ownership => f.write_str("OWNERSHIP")?, - Action::PurchaseDataExchangeListing => f.write_str("PURCHASE DATA EXCHANGE LISTING")?, - Action::Read => f.write_str("READ")?, - Action::ReadSession => f.write_str("READ SESSION")?, Action::References { .. } => f.write_str("REFERENCES")?, - Action::Replicate => f.write_str("REPLICATE")?, - Action::ResolveAll => f.write_str("RESOLVE ALL")?, - Action::Role { role } => write!(f, "ROLE {role}")?, Action::Select { .. } => f.write_str("SELECT")?, Action::Temporary => f.write_str("TEMPORARY")?, Action::Trigger => f.write_str("TRIGGER")?, @@ -6706,268 +5236,6 @@ impl fmt::Display for Action { } } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `CREATE` privilege. -pub enum ActionCreateObjectType { - Account, - Application, - ApplicationPackage, - ComputePool, - DataExchangeListing, - Database, - ExternalVolume, - FailoverGroup, - Integration, - NetworkPolicy, - OrganiationListing, - ReplicationGroup, - Role, - Share, - User, - Warehouse, -} - -impl fmt::Display for ActionCreateObjectType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionCreateObjectType::Account => write!(f, "ACCOUNT"), - ActionCreateObjectType::Application => write!(f, "APPLICATION"), - ActionCreateObjectType::ApplicationPackage => write!(f, "APPLICATION PACKAGE"), - ActionCreateObjectType::ComputePool => write!(f, "COMPUTE POOL"), - ActionCreateObjectType::DataExchangeListing => write!(f, "DATA EXCHANGE LISTING"), - ActionCreateObjectType::Database => write!(f, "DATABASE"), - ActionCreateObjectType::ExternalVolume => write!(f, "EXTERNAL VOLUME"), - ActionCreateObjectType::FailoverGroup => write!(f, "FAILOVER GROUP"), - ActionCreateObjectType::Integration => write!(f, "INTEGRATION"), - ActionCreateObjectType::NetworkPolicy => write!(f, "NETWORK POLICY"), - ActionCreateObjectType::OrganiationListing => write!(f, "ORGANIZATION LISTING"), - ActionCreateObjectType::ReplicationGroup => write!(f, "REPLICATION GROUP"), - ActionCreateObjectType::Role => write!(f, "ROLE"), - ActionCreateObjectType::Share => write!(f, "SHARE"), - ActionCreateObjectType::User => write!(f, "USER"), - ActionCreateObjectType::Warehouse => write!(f, "WAREHOUSE"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `APPLY` privilege. 
-pub enum ActionApplyType { - AggregationPolicy, - AuthenticationPolicy, - JoinPolicy, - MaskingPolicy, - PackagesPolicy, - PasswordPolicy, - ProjectionPolicy, - RowAccessPolicy, - SessionPolicy, - Tag, -} - -impl fmt::Display for ActionApplyType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionApplyType::AggregationPolicy => write!(f, "AGGREGATION POLICY"), - ActionApplyType::AuthenticationPolicy => write!(f, "AUTHENTICATION POLICY"), - ActionApplyType::JoinPolicy => write!(f, "JOIN POLICY"), - ActionApplyType::MaskingPolicy => write!(f, "MASKING POLICY"), - ActionApplyType::PackagesPolicy => write!(f, "PACKAGES POLICY"), - ActionApplyType::PasswordPolicy => write!(f, "PASSWORD POLICY"), - ActionApplyType::ProjectionPolicy => write!(f, "PROJECTION POLICY"), - ActionApplyType::RowAccessPolicy => write!(f, "ROW ACCESS POLICY"), - ActionApplyType::SessionPolicy => write!(f, "SESSION POLICY"), - ActionApplyType::Tag => write!(f, "TAG"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `EXECUTE` privilege. -pub enum ActionExecuteObjectType { - Alert, - DataMetricFunction, - ManagedAlert, - ManagedTask, - Task, -} - -impl fmt::Display for ActionExecuteObjectType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionExecuteObjectType::Alert => write!(f, "ALERT"), - ActionExecuteObjectType::DataMetricFunction => write!(f, "DATA METRIC FUNCTION"), - ActionExecuteObjectType::ManagedAlert => write!(f, "MANAGED ALERT"), - ActionExecuteObjectType::ManagedTask => write!(f, "MANAGED TASK"), - ActionExecuteObjectType::Task => write!(f, "TASK"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `MANAGE` privilege. -pub enum ActionManageType { - AccountSupportCases, - EventSharing, - Grants, - ListingAutoFulfillment, - OrganizationSupportCases, - UserSupportCases, - Warehouses, -} - -impl fmt::Display for ActionManageType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionManageType::AccountSupportCases => write!(f, "ACCOUNT SUPPORT CASES"), - ActionManageType::EventSharing => write!(f, "EVENT SHARING"), - ActionManageType::Grants => write!(f, "GRANTS"), - ActionManageType::ListingAutoFulfillment => write!(f, "LISTING AUTO FULFILLMENT"), - ActionManageType::OrganizationSupportCases => write!(f, "ORGANIZATION SUPPORT CASES"), - ActionManageType::UserSupportCases => write!(f, "USER SUPPORT CASES"), - ActionManageType::Warehouses => write!(f, "WAREHOUSES"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `MODIFY` privilege. 
-pub enum ActionModifyType { - LogLevel, - TraceLevel, - SessionLogLevel, - SessionTraceLevel, -} - -impl fmt::Display for ActionModifyType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionModifyType::LogLevel => write!(f, "LOG LEVEL"), - ActionModifyType::TraceLevel => write!(f, "TRACE LEVEL"), - ActionModifyType::SessionLogLevel => write!(f, "SESSION LOG LEVEL"), - ActionModifyType::SessionTraceLevel => write!(f, "SESSION TRACE LEVEL"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// See -/// under `globalPrivileges` in the `MONITOR` privilege. -pub enum ActionMonitorType { - Execution, - Security, - Usage, -} - -impl fmt::Display for ActionMonitorType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ActionMonitorType::Execution => write!(f, "EXECUTION"), - ActionMonitorType::Security => write!(f, "SECURITY"), - ActionMonitorType::Usage => write!(f, "USAGE"), - } - } -} - -/// The principal that receives the privileges -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct Grantee { - pub grantee_type: GranteesType, - pub name: Option, -} - -impl fmt::Display for Grantee { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.grantee_type { - GranteesType::Role => { - write!(f, "ROLE ")?; - } - GranteesType::Share => { - write!(f, "SHARE ")?; - } - GranteesType::User => { - write!(f, "USER ")?; - } - GranteesType::Group => { - write!(f, "GROUP ")?; - } - GranteesType::Public => { - write!(f, "PUBLIC ")?; - } - GranteesType::DatabaseRole => { - write!(f, "DATABASE ROLE ")?; - } - GranteesType::Application => { - write!(f, "APPLICATION ")?; - } - GranteesType::ApplicationRole => { - write!(f, "APPLICATION ROLE ")?; - } - GranteesType::None => (), - } - if let Some(ref name) = self.name { - name.fmt(f)?; - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum GranteesType { - Role, - Share, - User, - Group, - Public, - DatabaseRole, - Application, - ApplicationRole, - None, -} - -/// Users/roles designated in a GRANT/REVOKE -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum GranteeName { - /// A bare identifier - ObjectName(ObjectName), - /// A MySQL user/host pair such as 'root'@'%' - UserHost { user: Ident, host: Ident }, -} - -impl fmt::Display for GranteeName { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - GranteeName::ObjectName(name) => name.fmt(f), - GranteeName::UserHost { user, host } => { - write!(f, "{user}@{host}") - } - } - } -} - /// Objects on which privileges are granted in a GRANT statement. 
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -6977,71 +5245,12 @@ pub enum GrantObjects { AllSequencesInSchema { schemas: Vec }, /// Grant privileges on `ALL TABLES IN SCHEMA [, ...]` AllTablesInSchema { schemas: Vec }, - /// Grant privileges on `ALL VIEWS IN SCHEMA [, ...]` - AllViewsInSchema { schemas: Vec }, - /// Grant privileges on `ALL MATERIALIZED VIEWS IN SCHEMA [, ...]` - AllMaterializedViewsInSchema { schemas: Vec }, - /// Grant privileges on `ALL EXTERNAL TABLES IN SCHEMA [, ...]` - AllExternalTablesInSchema { schemas: Vec }, - /// Grant privileges on `FUTURE SCHEMAS IN DATABASE [, ...]` - FutureSchemasInDatabase { databases: Vec }, - /// Grant privileges on `FUTURE TABLES IN SCHEMA [, ...]` - FutureTablesInSchema { schemas: Vec }, - /// Grant privileges on `FUTURE VIEWS IN SCHEMA [, ...]` - FutureViewsInSchema { schemas: Vec }, - /// Grant privileges on `FUTURE EXTERNAL TABLES IN SCHEMA [, ...]` - FutureExternalTablesInSchema { schemas: Vec }, - /// Grant privileges on `FUTURE MATERIALIZED VIEWS IN SCHEMA [, ...]` - FutureMaterializedViewsInSchema { schemas: Vec }, - /// Grant privileges on `FUTURE SEQUENCES IN SCHEMA [, ...]` - FutureSequencesInSchema { schemas: Vec }, - /// Grant privileges on specific databases - Databases(Vec), /// Grant privileges on specific schemas Schemas(Vec), /// Grant privileges on specific sequences Sequences(Vec), /// Grant privileges on specific tables Tables(Vec), - /// Grant privileges on specific views - Views(Vec), - /// Grant privileges on specific warehouses - Warehouses(Vec), - /// Grant privileges on specific integrations - Integrations(Vec), - /// Grant privileges on resource monitors - ResourceMonitors(Vec), - /// Grant privileges on users - Users(Vec), - /// Grant privileges on compute pools - ComputePools(Vec), - /// Grant privileges on connections - Connections(Vec), - /// Grant privileges on failover groups - FailoverGroup(Vec), - /// Grant privileges on replication group - ReplicationGroup(Vec), - /// Grant privileges on external volumes - ExternalVolumes(Vec), - /// Grant privileges on a procedure. In dialects that - /// support overloading, the argument types must be specified. - /// - /// For example: - /// `GRANT USAGE ON PROCEDURE foo(varchar) TO ROLE role1` - Procedure { - name: ObjectName, - arg_types: Vec, - }, - - /// Grant privileges on a function. In dialects that - /// support overloading, the argument types must be specified. 
- /// - /// For example: - /// `GRANT USAGE ON FUNCTION foo(varchar) TO ROLE role1` - Function { - name: ObjectName, - arg_types: Vec, - }, } impl fmt::Display for GrantObjects { @@ -7050,24 +5259,12 @@ impl fmt::Display for GrantObjects { GrantObjects::Sequences(sequences) => { write!(f, "SEQUENCE {}", display_comma_separated(sequences)) } - GrantObjects::Databases(databases) => { - write!(f, "DATABASE {}", display_comma_separated(databases)) - } GrantObjects::Schemas(schemas) => { write!(f, "SCHEMA {}", display_comma_separated(schemas)) } GrantObjects::Tables(tables) => { write!(f, "{}", display_comma_separated(tables)) } - GrantObjects::Views(views) => { - write!(f, "VIEW {}", display_comma_separated(views)) - } - GrantObjects::Warehouses(warehouses) => { - write!(f, "WAREHOUSE {}", display_comma_separated(warehouses)) - } - GrantObjects::Integrations(integrations) => { - write!(f, "INTEGRATION {}", display_comma_separated(integrations)) - } GrantObjects::AllSequencesInSchema { schemas } => { write!( f, @@ -7082,139 +5279,10 @@ impl fmt::Display for GrantObjects { display_comma_separated(schemas) ) } - GrantObjects::AllExternalTablesInSchema { schemas } => { - write!( - f, - "ALL EXTERNAL TABLES IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::AllViewsInSchema { schemas } => { - write!( - f, - "ALL VIEWS IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::AllMaterializedViewsInSchema { schemas } => { - write!( - f, - "ALL MATERIALIZED VIEWS IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::FutureSchemasInDatabase { databases } => { - write!( - f, - "FUTURE SCHEMAS IN DATABASE {}", - display_comma_separated(databases) - ) - } - GrantObjects::FutureTablesInSchema { schemas } => { - write!( - f, - "FUTURE TABLES IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::FutureExternalTablesInSchema { schemas } => { - write!( - f, - "FUTURE EXTERNAL TABLES IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::FutureViewsInSchema { schemas } => { - write!( - f, - "FUTURE VIEWS IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::FutureMaterializedViewsInSchema { schemas } => { - write!( - f, - "FUTURE MATERIALIZED VIEWS IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::FutureSequencesInSchema { schemas } => { - write!( - f, - "FUTURE SEQUENCES IN SCHEMA {}", - display_comma_separated(schemas) - ) - } - GrantObjects::ResourceMonitors(objects) => { - write!(f, "RESOURCE MONITOR {}", display_comma_separated(objects)) - } - GrantObjects::Users(objects) => { - write!(f, "USER {}", display_comma_separated(objects)) - } - GrantObjects::ComputePools(objects) => { - write!(f, "COMPUTE POOL {}", display_comma_separated(objects)) - } - GrantObjects::Connections(objects) => { - write!(f, "CONNECTION {}", display_comma_separated(objects)) - } - GrantObjects::FailoverGroup(objects) => { - write!(f, "FAILOVER GROUP {}", display_comma_separated(objects)) - } - GrantObjects::ReplicationGroup(objects) => { - write!(f, "REPLICATION GROUP {}", display_comma_separated(objects)) - } - GrantObjects::ExternalVolumes(objects) => { - write!(f, "EXTERNAL VOLUME {}", display_comma_separated(objects)) - } - GrantObjects::Procedure { name, arg_types } => { - write!(f, "PROCEDURE {name}")?; - if !arg_types.is_empty() { - write!(f, "({})", display_comma_separated(arg_types))?; - } - Ok(()) - } - GrantObjects::Function { name, arg_types } => { - write!(f, "FUNCTION {name}")?; 
- if !arg_types.is_empty() { - write!(f, "({})", display_comma_separated(arg_types))?; - } - Ok(()) - } } } } -/// A `DENY` statement -/// -/// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/deny-transact-sql) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct DenyStatement { - pub privileges: Privileges, - pub objects: GrantObjects, - pub grantees: Vec, - pub granted_by: Option, - pub cascade: Option, -} - -impl fmt::Display for DenyStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "DENY {}", self.privileges)?; - write!(f, " ON {}", self.objects)?; - if !self.grantees.is_empty() { - write!(f, " TO {}", display_comma_separated(&self.grantees))?; - } - if let Some(cascade) = &self.cascade { - write!(f, " {cascade}")?; - } - if let Some(granted_by) = &self.granted_by { - write!(f, " AS {granted_by}")?; - } - Ok(()) - } -} - /// SQL assignment `foo = expr` as used in SQLUpdate #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -7246,7 +5314,7 @@ pub enum AssignmentTarget { impl fmt::Display for AssignmentTarget { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - AssignmentTarget::ColumnName(column) => write!(f, "{column}"), + AssignmentTarget::ColumnName(column) => write!(f, "{}", column), AssignmentTarget::Tuple(columns) => write!(f, "({})", display_comma_separated(columns)), } } @@ -7266,8 +5334,8 @@ pub enum FunctionArgExpr { impl From for FunctionArgExpr { fn from(wildcard_expr: Expr) -> Self { match wildcard_expr { - Expr::QualifiedWildcard(prefix, _) => Self::QualifiedWildcard(prefix), - Expr::Wildcard(_) => Self::Wildcard, + Expr::QualifiedWildcard(prefix) => Self::QualifiedWildcard(prefix), + Expr::Wildcard => Self::Wildcard, expr => Self::Expr(expr), } } @@ -7294,10 +5362,6 @@ pub enum FunctionArgOperator { RightArrow, /// function(arg1 := value1) Assignment, - /// function(arg1 : value1) - Colon, - /// function(arg1 VALUE value1) - Value, } impl fmt::Display for FunctionArgOperator { @@ -7306,8 +5370,6 @@ impl fmt::Display for FunctionArgOperator { FunctionArgOperator::Equals => f.write_str("="), FunctionArgOperator::RightArrow => f.write_str("=>"), FunctionArgOperator::Assignment => f.write_str(":="), - FunctionArgOperator::Colon => f.write_str(":"), - FunctionArgOperator::Value => f.write_str("VALUE"), } } } @@ -7316,22 +5378,11 @@ impl fmt::Display for FunctionArgOperator { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum FunctionArg { - /// `name` is identifier - /// - /// Enabled when `Dialect::supports_named_fn_args_with_expr_name` returns 'false' Named { name: Ident, arg: FunctionArgExpr, operator: FunctionArgOperator, }, - /// `name` is arbitrary expression - /// - /// Enabled when `Dialect::supports_named_fn_args_with_expr_name` returns 'true' - ExprNamed { - name: Expr, - arg: FunctionArgExpr, - operator: FunctionArgOperator, - }, Unnamed(FunctionArgExpr), } @@ -7343,11 +5394,6 @@ impl fmt::Display for FunctionArg { arg, operator, } => write!(f, "{name} {operator} {arg}"), - FunctionArg::ExprNamed { - name, - arg, - operator, - } => write!(f, "{name} {operator} {arg}"), FunctionArg::Unnamed(unnamed_arg) => write!(f, "{unnamed_arg}"), } } @@ -7370,34 +5416,12 @@ impl fmt::Display for CloseCursor { } } -/// A 
Drop Domain statement -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct DropDomain { - /// Whether to drop the domain if it exists - pub if_exists: bool, - /// The name of the domain to drop - pub name: ObjectName, - /// The behavior to apply when dropping the domain - pub drop_behavior: Option, -} - /// A function call #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Function { pub name: ObjectName, - /// Flags whether this function call uses the [ODBC syntax]. - /// - /// Example: - /// ```sql - /// SELECT {fn CONCAT('foo', 'bar')} - /// ``` - /// - /// [ODBC syntax]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 - pub uses_odbc_syntax: bool, /// The parameters to the function, including any options specified within the /// delimiting parentheses. /// @@ -7436,10 +5460,6 @@ pub struct Function { impl fmt::Display for Function { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if self.uses_odbc_syntax { - write!(f, "{{fn ")?; - } - write!(f, "{}{}{}", self.name, self.parameters, self.args)?; if !self.within_group.is_empty() { @@ -7459,12 +5479,7 @@ impl fmt::Display for Function { } if let Some(o) = &self.over { - f.write_str(" OVER ")?; - o.fmt(f)?; - } - - if self.uses_odbc_syntax { - write!(f, "}}")?; + write!(f, " OVER {o}")?; } Ok(()) @@ -7491,8 +5506,8 @@ impl fmt::Display for FunctionArguments { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { FunctionArguments::None => Ok(()), - FunctionArguments::Subquery(query) => write!(f, "({query})"), - FunctionArguments::List(args) => write!(f, "({args})"), + FunctionArguments::Subquery(query) => write!(f, "({})", query), + FunctionArguments::List(args) => write!(f, "({})", args), } } } @@ -7513,14 +5528,11 @@ pub struct FunctionArgumentList { impl fmt::Display for FunctionArgumentList { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(duplicate_treatment) = self.duplicate_treatment { - write!(f, "{duplicate_treatment} ")?; + write!(f, "{} ", duplicate_treatment)?; } write!(f, "{}", display_comma_separated(&self.args))?; if !self.clauses.is_empty() { - if !self.args.is_empty() { - write!(f, " ")?; - } - write!(f, "{}", display_separated(&self.clauses, " "))?; + write!(f, " {}", display_separated(&self.clauses, " "))?; } Ok(()) } @@ -7562,18 +5574,13 @@ pub enum FunctionArgumentClause { /// /// [`GROUP_CONCAT`]: https://dev.mysql.com/doc/refman/8.0/en/aggregate-functions.html#function_group-concat Separator(Value), - /// The json-null-clause to the [`JSON_ARRAY`]/[`JSON_OBJECT`] function in MSSQL. 
- /// - /// [`JSON_ARRAY`]: - /// [`JSON_OBJECT`]: - JsonNullClause(JsonNullClause), } impl fmt::Display for FunctionArgumentClause { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment) => { - write!(f, "{null_treatment}") + write!(f, "{}", null_treatment) } FunctionArgumentClause::OrderBy(order_by) => { write!(f, "ORDER BY {}", display_comma_separated(order_by)) @@ -7582,32 +5589,10 @@ impl fmt::Display for FunctionArgumentClause { FunctionArgumentClause::OnOverflow(on_overflow) => write!(f, "{on_overflow}"), FunctionArgumentClause::Having(bound) => write!(f, "{bound}"), FunctionArgumentClause::Separator(sep) => write!(f, "SEPARATOR {sep}"), - FunctionArgumentClause::JsonNullClause(null_clause) => write!(f, "{null_clause}"), } } } -/// A method call -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct Method { - pub expr: Box, - // always non-empty - pub method_chain: Vec, -} - -impl fmt::Display for Method { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "{}.{}", - self.expr, - display_separated(&self.method_chain, ".") - ) - } -} - #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -7746,7 +5731,6 @@ impl fmt::Display for HavingBoundKind { pub enum ObjectType { Table, View, - MaterializedView, Index, Schema, Database, @@ -7761,7 +5745,6 @@ impl fmt::Display for ObjectType { f.write_str(match self { ObjectType::Table => "TABLE", ObjectType::View => "VIEW", - ObjectType::MaterializedView => "MATERIALIZED VIEW", ObjectType::Index => "INDEX", ObjectType::Schema => "SCHEMA", ObjectType::Database => "DATABASE", @@ -7817,14 +5800,6 @@ pub enum HiveRowFormat { DELIMITED { delimiters: Vec }, } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct HiveLoadDataFormat { - pub serde: Expr, - pub input_format: Expr, -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8012,29 +5987,17 @@ pub enum SqlOption { range_direction: Option, for_values: Vec, }, - /// Comment parameter (supports `=` and no `=` syntax) - Comment(CommentDef), - /// MySQL TableSpace option - /// - TableSpace(TablespaceOption), - /// An option representing a key value pair, where the value is a parenthesized list and with an optional name - /// e.g. - /// - /// UNION = (tbl_name\[,tbl_name\]...) 
- /// ENGINE = ReplicatedMergeTree('/table_name','{replica}', ver) - /// ENGINE = SummingMergeTree(\[columns\]) - NamedParenthesizedList(NamedParenthesizedList), } impl fmt::Display for SqlOption { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - SqlOption::Clustered(c) => write!(f, "{c}"), + SqlOption::Clustered(c) => write!(f, "{}", c), SqlOption::Ident(ident) => { - write!(f, "{ident}") + write!(f, "{}", ident) } SqlOption::KeyValue { key: name, value } => { - write!(f, "{name} = {value}") + write!(f, "{} = {}", name, value) } SqlOption::Partition { column_name, @@ -8055,54 +6018,10 @@ impl fmt::Display for SqlOption { display_comma_separated(for_values) ) } - SqlOption::TableSpace(tablespace_option) => { - write!(f, "TABLESPACE {}", tablespace_option.name)?; - match tablespace_option.storage { - Some(StorageType::Disk) => write!(f, " STORAGE DISK"), - Some(StorageType::Memory) => write!(f, " STORAGE MEMORY"), - None => Ok(()), - } - } - SqlOption::Comment(comment) => match comment { - CommentDef::WithEq(comment) => { - write!(f, "COMMENT = '{comment}'") - } - CommentDef::WithoutEq(comment) => { - write!(f, "COMMENT '{comment}'") - } - }, - SqlOption::NamedParenthesizedList(value) => { - write!(f, "{} = ", value.key)?; - if let Some(key) = &value.name { - write!(f, "{key}")?; - } - if !value.values.is_empty() { - write!(f, "({})", display_comma_separated(&value.values))? - } - Ok(()) - } } } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum StorageType { - Disk, - Memory, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// MySql TableSpace option -/// -pub struct TablespaceOption { - pub name: String, - pub storage: Option, -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8117,70 +6036,6 @@ impl fmt::Display for SecretOption { } } -/// A `CREATE SERVER` statement. 
-/// -/// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createserver.html) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CreateServerStatement { - pub name: ObjectName, - pub if_not_exists: bool, - pub server_type: Option, - pub version: Option, - pub foreign_data_wrapper: ObjectName, - pub options: Option>, -} - -impl fmt::Display for CreateServerStatement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let CreateServerStatement { - name, - if_not_exists, - server_type, - version, - foreign_data_wrapper, - options, - } = self; - - write!( - f, - "CREATE SERVER {if_not_exists}{name} ", - if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, - )?; - - if let Some(st) = server_type { - write!(f, "TYPE {st} ")?; - } - - if let Some(v) = version { - write!(f, "VERSION {v} ")?; - } - - write!(f, "FOREIGN DATA WRAPPER {foreign_data_wrapper}")?; - - if let Some(o) = options { - write!(f, " OPTIONS ({o})", o = display_comma_separated(o))?; - } - - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CreateServerOption { - pub key: Ident, - pub value: Ident, -} - -impl fmt::Display for CreateServerOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} {}", self.key, self.value) - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8195,7 +6050,7 @@ impl fmt::Display for AttachDuckDBDatabaseOption { AttachDuckDBDatabaseOption::ReadOnly(Some(true)) => write!(f, "READ_ONLY true"), AttachDuckDBDatabaseOption::ReadOnly(Some(false)) => write!(f, "READ_ONLY false"), AttachDuckDBDatabaseOption::ReadOnly(None) => write!(f, "READ_ONLY"), - AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {t}"), + AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {}", t), } } } @@ -8244,7 +6099,6 @@ pub enum TransactionIsolationLevel { ReadCommitted, RepeatableRead, Serializable, - Snapshot, } impl fmt::Display for TransactionIsolationLevel { @@ -8255,15 +6109,13 @@ impl fmt::Display for TransactionIsolationLevel { ReadCommitted => "READ COMMITTED", RepeatableRead => "REPEATABLE READ", Serializable => "SERIALIZABLE", - Snapshot => "SNAPSHOT", }) } } -/// Modifier for the transaction in the `BEGIN` syntax +/// SQLite specific syntax /// -/// SQLite: -/// MS-SQL: +/// #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8271,8 +6123,6 @@ pub enum TransactionModifier { Deferred, Immediate, Exclusive, - Try, - Catch, } impl fmt::Display for TransactionModifier { @@ -8282,8 +6132,6 @@ impl fmt::Display for TransactionModifier { Deferred => "DEFERRED", Immediate => "IMMEDIATE", Exclusive => "EXCLUSIVE", - Try => "TRY", - Catch => "CATCH", }) } } @@ -8313,14 +6161,14 @@ impl fmt::Display for ShowStatementFilter { #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ShowStatementInClause { +pub enum ShowClause { IN, 
FROM, } -impl fmt::Display for ShowStatementInClause { +impl fmt::Display for ShowClause { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use ShowStatementInClause::*; + use ShowClause::*; match self { FROM => write!(f, "FROM"), IN => write!(f, "IN"), @@ -8347,11 +6195,11 @@ impl fmt::Display for SqliteOnConflict { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use SqliteOnConflict::*; match self { - Rollback => write!(f, "OR ROLLBACK"), - Abort => write!(f, "OR ABORT"), - Fail => write!(f, "OR FAIL"), - Ignore => write!(f, "OR IGNORE"), - Replace => write!(f, "OR REPLACE"), + Rollback => write!(f, "ROLLBACK"), + Abort => write!(f, "ABORT"), + Fail => write!(f, "FAIL"), + Ignore => write!(f, "IGNORE"), + Replace => write!(f, "REPLACE"), } } } @@ -8415,7 +6263,7 @@ impl fmt::Display for CopyTarget { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use CopyTarget::*; match self { - Stdin => write!(f, "STDIN"), + Stdin { .. } => write!(f, "STDIN"), Stdout => write!(f, "STDOUT"), File { filename } => write!(f, "'{}'", value::escape_single_quote_string(filename)), Program { command } => write!( @@ -8743,35 +6591,6 @@ impl Display for MergeClause { } } -/// A Output Clause in the end of a 'MERGE' Statement -/// -/// Example: -/// OUTPUT $action, deleted.* INTO dbo.temp_products; -/// [mssql](https://learn.microsoft.com/en-us/sql/t-sql/queries/output-clause-transact-sql) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct OutputClause { - pub select_items: Vec, - pub into_table: SelectInto, -} - -impl fmt::Display for OutputClause { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let OutputClause { - select_items, - into_table, - } = self; - - write!( - f, - "OUTPUT {} {}", - display_comma_separated(select_items), - into_table - ) - } -} - #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -8849,30 +6668,30 @@ impl fmt::Display for FlushLocation { } } -/// Optional context modifier for statements that can be or `LOCAL`, `GLOBAL`, or `SESSION`. +/// Optional context modifier for statements that can be or `LOCAL`, or `SESSION`. #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum ContextModifier { + /// No context defined. Each dialect defines the default in this scenario. + None, /// `LOCAL` identifier, usually related to transactional states. Local, /// `SESSION` identifier Session, - /// `GLOBAL` identifier - Global, } impl fmt::Display for ContextModifier { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { + Self::None => { + write!(f, "") + } Self::Local => { - write!(f, "LOCAL ") + write!(f, " LOCAL") } Self::Session => { - write!(f, "SESSION ") - } - Self::Global => { - write!(f, "GLOBAL ") + write!(f, " SESSION") } } } @@ -9071,8 +6890,7 @@ impl fmt::Display for FunctionDeterminismSpecifier { /// where within the statement, the body shows up. 
/// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11 -/// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html -/// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql +/// [Postgres]: https://www.postgresql.org/docs/15/sql-createfunction.html #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -9101,22 +6919,6 @@ pub enum CreateFunctionBody { /// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11 AsAfterOptions(Expr), - /// Function body with statements before the `RETURN` keyword. - /// - /// Example: - /// ```sql - /// CREATE FUNCTION my_scalar_udf(a INT, b INT) - /// RETURNS INT - /// AS - /// BEGIN - /// DECLARE c INT; - /// SET c = a + b; - /// RETURN c; - /// END - /// ``` - /// - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql - AsBeginEnd(BeginEndStatements), /// Function body expression using the 'RETURN' keyword. /// /// Example: @@ -9126,32 +6928,8 @@ pub enum CreateFunctionBody { /// RETURN a + b; /// ``` /// - /// [PostgreSQL]: https://www.postgresql.org/docs/current/sql-createfunction.html + /// [Postgres]: https://www.postgresql.org/docs/current/sql-createfunction.html Return(Expr), - - /// Function body expression using the 'AS RETURN' keywords - /// - /// Example: - /// ```sql - /// CREATE FUNCTION myfunc(a INT, b INT) - /// RETURNS TABLE - /// AS RETURN (SELECT a + b AS sum); - /// ``` - /// - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql - AsReturnExpr(Expr), - - /// Function body expression using the 'AS RETURN' keywords, with an un-parenthesized SELECT query - /// - /// Example: - /// ```sql - /// CREATE FUNCTION myfunc(a INT, b INT) - /// RETURNS TABLE - /// AS RETURN SELECT a + b AS sum; - /// ``` - /// - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#select_stmt - AsReturnSelect(Select), } #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -9378,100 +7156,38 @@ pub enum MySQLColumnPosition { impl Display for MySQLColumnPosition { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - MySQLColumnPosition::First => write!(f, "FIRST"), + MySQLColumnPosition::First => Ok(write!(f, "FIRST")?), MySQLColumnPosition::After(ident) => { let column_name = &ident.value; - write!(f, "AFTER {column_name}") + Ok(write!(f, "AFTER {column_name}")?) 
} } } } -/// MySQL `CREATE VIEW` algorithm parameter: [ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum CreateViewAlgorithm { - Undefined, - Merge, - TempTable, -} - -impl Display for CreateViewAlgorithm { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CreateViewAlgorithm::Undefined => write!(f, "UNDEFINED"), - CreateViewAlgorithm::Merge => write!(f, "MERGE"), - CreateViewAlgorithm::TempTable => write!(f, "TEMPTABLE"), - } - } -} -/// MySQL `CREATE VIEW` security parameter: [SQL SECURITY { DEFINER | INVOKER }] -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum CreateViewSecurity { - Definer, - Invoker, -} - -impl Display for CreateViewSecurity { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - CreateViewSecurity::Definer => write!(f, "DEFINER"), - CreateViewSecurity::Invoker => write!(f, "INVOKER"), - } - } -} - -/// [MySQL] `CREATE VIEW` additional parameters +/// Engine of DB. Some warehouse has parameters of engine, e.g. [clickhouse] /// -/// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html +/// [clickhouse]: https://clickhouse.com/docs/en/engines/table-engines #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct CreateViewParams { - pub algorithm: Option, - pub definer: Option, - pub security: Option, +pub struct TableEngine { + pub name: String, + pub parameters: Option>, } -impl Display for CreateViewParams { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let CreateViewParams { - algorithm, - definer, - security, - } = self; - if let Some(algorithm) = algorithm { - write!(f, "ALGORITHM = {algorithm} ")?; - } - if let Some(definers) = definer { - write!(f, "DEFINER = {definers} ")?; - } - if let Some(security) = security { - write!(f, "SQL SECURITY {security} ")?; +impl Display for TableEngine { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.name)?; + + if let Some(parameters) = self.parameters.as_ref() { + write!(f, "({})", display_comma_separated(parameters))?; } + Ok(()) } } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// Key/Value, where the value is a (optionally named) list of identifiers -/// -/// ```sql -/// UNION = (tbl_name[,tbl_name]...) 
-/// ENGINE = ReplicatedMergeTree('/table_name','{replica}', ver) -/// ENGINE = SummingMergeTree([columns]) -/// ``` -pub struct NamedParenthesizedList { - pub key: Ident, - pub name: Option, - pub values: Vec, -} - /// Snowflake `WITH ROW ACCESS POLICY policy_name ON (identifier, ...)` /// /// @@ -9508,12 +7224,12 @@ impl Display for RowAccessPolicy { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Tag { - pub key: ObjectName, + pub key: Ident, pub value: String, } impl Tag { - pub fn new(key: ObjectName, value: String) -> Self { + pub fn new(key: Ident, value: String) -> Self { Self { key, value } } } @@ -9533,12 +7249,18 @@ pub enum CommentDef { /// Does not include `=` when printing the comment, as `COMMENT 'comment'` WithEq(String), WithoutEq(String), + // For Hive dialect, the table comment is after the column definitions without `=`, + // so we need to add an extra variant to allow to identify this case when displaying. + // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) + AfterColumnDefsWithoutEq(String), } impl Display for CommentDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - CommentDef::WithEq(comment) | CommentDef::WithoutEq(comment) => write!(f, "{comment}"), + CommentDef::WithEq(comment) + | CommentDef::WithoutEq(comment) + | CommentDef::AfterColumnDefsWithoutEq(comment) => write!(f, "{comment}"), } } } @@ -9604,7 +7326,7 @@ where /// ```sql /// EXPLAIN (ANALYZE, VERBOSE TRUE, FORMAT TEXT) SELECT * FROM my_table; /// -/// VACUUM (VERBOSE, ANALYZE ON, PARALLEL 10) my_table; +/// VACCUM (VERBOSE, ANALYZE ON, PARALLEL 10) my_table; /// ``` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -9624,446 +7346,8 @@ impl Display for UtilityOption { } } -/// Represents the different options available for `SHOW` -/// statements to filter the results. 
Example from Snowflake: -/// -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ShowStatementOptions { - pub show_in: Option, - pub starts_with: Option, - pub limit: Option, - pub limit_from: Option, - pub filter_position: Option, -} - -impl Display for ShowStatementOptions { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let (like_in_infix, like_in_suffix) = match &self.filter_position { - Some(ShowStatementFilterPosition::Infix(filter)) => { - (format!(" {filter}"), "".to_string()) - } - Some(ShowStatementFilterPosition::Suffix(filter)) => { - ("".to_string(), format!(" {filter}")) - } - None => ("".to_string(), "".to_string()), - }; - write!( - f, - "{like_in_infix}{show_in}{starts_with}{limit}{from}{like_in_suffix}", - show_in = match &self.show_in { - Some(i) => format!(" {i}"), - None => String::new(), - }, - starts_with = match &self.starts_with { - Some(s) => format!(" STARTS WITH {s}"), - None => String::new(), - }, - limit = match &self.limit { - Some(l) => format!(" LIMIT {l}"), - None => String::new(), - }, - from = match &self.limit_from { - Some(f) => format!(" FROM {f}"), - None => String::new(), - } - )?; - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ShowStatementFilterPosition { - Infix(ShowStatementFilter), // For example: SHOW COLUMNS LIKE '%name%' IN TABLE tbl - Suffix(ShowStatementFilter), // For example: SHOW COLUMNS IN tbl LIKE '%name%' -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ShowStatementInParentType { - Account, - Database, - Schema, - Table, - View, -} - -impl fmt::Display for ShowStatementInParentType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - ShowStatementInParentType::Account => write!(f, "ACCOUNT"), - ShowStatementInParentType::Database => write!(f, "DATABASE"), - ShowStatementInParentType::Schema => write!(f, "SCHEMA"), - ShowStatementInParentType::Table => write!(f, "TABLE"), - ShowStatementInParentType::View => write!(f, "VIEW"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ShowStatementIn { - pub clause: ShowStatementInClause, - pub parent_type: Option, - #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] - pub parent_name: Option, -} - -impl fmt::Display for ShowStatementIn { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.clause)?; - if let Some(parent_type) = &self.parent_type { - write!(f, " {parent_type}")?; - } - if let Some(parent_name) = &self.parent_name { - write!(f, " {parent_name}")?; - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ShowObjects { - pub terse: bool, - pub show_options: ShowStatementOptions, -} - -/// MSSQL's json null clause -/// -/// ```plaintext -/// ::= -/// NULL ON NULL -/// | ABSENT ON NULL -/// ``` -/// -/// -#[derive(Debug, Clone, 
PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum JsonNullClause { - NullOnNull, - AbsentOnNull, -} - -impl Display for JsonNullClause { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - JsonNullClause::NullOnNull => write!(f, "NULL ON NULL"), - JsonNullClause::AbsentOnNull => write!(f, "ABSENT ON NULL"), - } - } -} - -/// rename object definition -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct RenameTable { - pub old_name: ObjectName, - pub new_name: ObjectName, -} - -impl fmt::Display for RenameTable { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} TO {}", self.old_name, self.new_name)?; - Ok(()) - } -} - -/// Represents the referenced table in an `INSERT INTO` statement -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableObject { - /// Table specified by name. - /// Example: - /// ```sql - /// INSERT INTO my_table - /// ``` - TableName(#[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] ObjectName), - - /// Table specified as a function. - /// Example: - /// ```sql - /// INSERT INTO TABLE FUNCTION remote('localhost', default.simple_table) - /// ``` - /// [Clickhouse](https://clickhouse.com/docs/en/sql-reference/table-functions) - TableFunction(Function), -} - -impl fmt::Display for TableObject { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::TableName(table_name) => write!(f, "{table_name}"), - Self::TableFunction(func) => write!(f, "FUNCTION {func}"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SetSessionParamKind { - Generic(SetSessionParamGeneric), - IdentityInsert(SetSessionParamIdentityInsert), - Offsets(SetSessionParamOffsets), - Statistics(SetSessionParamStatistics), -} - -impl fmt::Display for SetSessionParamKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SetSessionParamKind::Generic(x) => write!(f, "{x}"), - SetSessionParamKind::IdentityInsert(x) => write!(f, "{x}"), - SetSessionParamKind::Offsets(x) => write!(f, "{x}"), - SetSessionParamKind::Statistics(x) => write!(f, "{x}"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct SetSessionParamGeneric { - pub names: Vec, - pub value: String, -} - -impl fmt::Display for SetSessionParamGeneric { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} {}", display_comma_separated(&self.names), self.value) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct SetSessionParamIdentityInsert { - pub obj: ObjectName, - pub value: SessionParamValue, -} - -impl fmt::Display for SetSessionParamIdentityInsert { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "IDENTITY_INSERT {} {}", 
self.obj, self.value) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct SetSessionParamOffsets { - pub keywords: Vec, - pub value: SessionParamValue, -} - -impl fmt::Display for SetSessionParamOffsets { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "OFFSETS {} {}", - display_comma_separated(&self.keywords), - self.value - ) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct SetSessionParamStatistics { - pub topic: SessionParamStatsTopic, - pub value: SessionParamValue, -} - -impl fmt::Display for SetSessionParamStatistics { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "STATISTICS {} {}", self.topic, self.value) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SessionParamStatsTopic { - IO, - Profile, - Time, - Xml, -} - -impl fmt::Display for SessionParamStatsTopic { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SessionParamStatsTopic::IO => write!(f, "IO"), - SessionParamStatsTopic::Profile => write!(f, "PROFILE"), - SessionParamStatsTopic::Time => write!(f, "TIME"), - SessionParamStatsTopic::Xml => write!(f, "XML"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SessionParamValue { - On, - Off, -} - -impl fmt::Display for SessionParamValue { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - SessionParamValue::On => write!(f, "ON"), - SessionParamValue::Off => write!(f, "OFF"), - } - } -} - -/// Snowflake StorageSerializationPolicy for Iceberg Tables -/// ```sql -/// [ STORAGE_SERIALIZATION_POLICY = { COMPATIBLE | OPTIMIZED } ] -/// ``` -/// -/// -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum StorageSerializationPolicy { - Compatible, - Optimized, -} - -impl Display for StorageSerializationPolicy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - StorageSerializationPolicy::Compatible => write!(f, "COMPATIBLE"), - StorageSerializationPolicy::Optimized => write!(f, "OPTIMIZED"), - } - } -} - -/// Variants of the Snowflake `COPY INTO` statement -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum CopyIntoSnowflakeKind { - /// Loads data from files to a table - /// See: - Table, - /// Unloads data from a table or query to external files - /// See: - Location, -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct PrintStatement { - pub message: Box, -} - -impl fmt::Display for PrintStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "PRINT {}", self.message) - } -} - 
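
The `fmt::Display` impls in this hunk (for example the one on `PrintStatement` just above) are what let a parsed statement be serialized back to SQL text. A minimal round-trip sketch, assuming the `MsSqlDialect` and a crate build that still contains the `PRINT` support shown in this hunk (both are assumptions about the surrounding crate state, not something this diff establishes):

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = MsSqlDialect {};
    // Parse a statement whose AST node carries one of the Display impls above.
    let sql = "PRINT 'hello'";
    let ast = Parser::parse_sql(&dialect, sql).expect("parse failed");
    // Rendering the AST goes through fmt::Display, so the text round-trips.
    assert_eq!(ast[0].to_string(), sql);
}
```
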
-/// Represents a `Return` statement. -/// -/// [MsSql triggers](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql) -/// [MsSql functions](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ReturnStatement { - pub value: Option, -} - -impl fmt::Display for ReturnStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self.value { - Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {expr}"), - None => write!(f, "RETURN"), - } - } -} - -/// Variants of a `RETURN` statement -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum ReturnStatementValue { - Expr(Expr), -} - -/// Represents an `OPEN` statement. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct OpenStatement { - /// Cursor name - pub cursor_name: Ident, -} - -impl fmt::Display for OpenStatement { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "OPEN {}", self.cursor_name) - } -} - -/// Specifies Include / Exclude NULL within UNPIVOT command. -/// For example -/// `UNPIVOT (column1 FOR new_column IN (col3, col4, col5, col6))` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum NullInclusion { - IncludeNulls, - ExcludeNulls, -} - -impl fmt::Display for NullInclusion { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - NullInclusion::IncludeNulls => write!(f, "INCLUDE NULLS"), - NullInclusion::ExcludeNulls => write!(f, "EXCLUDE NULLS"), - } - } -} - -/// Checks membership of a value in a JSON array -/// -/// Syntax: -/// ```sql -/// MEMBER OF() -/// ``` -/// [MySQL](https://dev.mysql.com/doc/refman/8.4/en/json-search-functions.html#operator_member-of) -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct MemberOf { - pub value: Box, - pub array: Box, -} - -impl fmt::Display for MemberOf { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} MEMBER OF({})", self.value, self.array) - } -} - #[cfg(test)] mod tests { - use crate::tokenizer::Location; - use super::*; #[test] @@ -10161,9 +7445,9 @@ mod tests { #[test] fn test_interval_display() { let interval = Expr::Interval(Interval { - value: Box::new(Expr::Value( - Value::SingleQuotedString(String::from("123:45.67")).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "123:45.67", + )))), leading_field: Some(DateTimeField::Minute), leading_precision: Some(10), last_field: Some(DateTimeField::Second), @@ -10175,9 +7459,7 @@ mod tests { ); let interval = Expr::Interval(Interval { - value: Box::new(Expr::Value( - Value::SingleQuotedString(String::from("5")).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("5")))), leading_field: Some(DateTimeField::Second), leading_precision: 
Some(1), last_field: None, @@ -10359,16 +7641,4 @@ mod tests { test_steps(OneOrManyWithParens::Many(vec![2]), vec![2], 3); test_steps(OneOrManyWithParens::Many(vec![3, 4]), vec![3, 4], 4); } - - // Tests that the position in the code of an `Ident` does not affect its - // ordering. - #[test] - fn test_ident_ord() { - let mut a = Ident::with_span(Span::new(Location::new(1, 1), Location::new(1, 1)), "a"); - let mut b = Ident::with_span(Span::new(Location::new(2, 2), Location::new(2, 2)), "b"); - - assert!(a < b); - std::mem::swap(&mut a.span, &mut b.span); - assert!(a < b); - } } diff --git a/src/ast/operator.rs b/src/ast/operator.rs index d0bb05e3..c3bb379d 100644 --- a/src/ast/operator.rs +++ b/src/ast/operator.rs @@ -51,23 +51,6 @@ pub enum UnaryOperator { PGPrefixFactorial, /// Absolute value, e.g. `@ -9` (PostgreSQL-specific) PGAbs, - /// Unary logical not operator: e.g. `! false` (Hive-specific) - BangNot, - /// `#` Number of points in path or polygon (PostgreSQL/Redshift geometric operator) - /// see - Hash, - /// `@-@` Length or circumference (PostgreSQL/Redshift geometric operator) - /// see - AtDashAt, - /// `@@` Center (PostgreSQL/Redshift geometric operator) - /// see - DoubleAt, - /// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator) - /// see - QuestionDash, - /// `?|` Is vertical? (PostgreSQL/Redshift geometric operator) - /// see - QuestionPipe, } impl fmt::Display for UnaryOperator { @@ -82,12 +65,6 @@ impl fmt::Display for UnaryOperator { UnaryOperator::PGPostfixFactorial => "!", UnaryOperator::PGPrefixFactorial => "!!", UnaryOperator::PGAbs => "@", - UnaryOperator::BangNot => "!", - UnaryOperator::Hash => "#", - UnaryOperator::AtDashAt => "@-@", - UnaryOperator::DoubleAt => "@@", - UnaryOperator::QuestionDash => "?-", - UnaryOperator::QuestionPipe => "?|", }) } } @@ -139,11 +116,6 @@ pub enum BinaryOperator { DuckIntegerDivide, /// MySQL [`DIV`](https://dev.mysql.com/doc/refman/8.0/en/arithmetic-functions.html) integer division MyIntegerDivide, - /// MATCH operator, e.g. `a MATCH b` (SQLite-specific) - /// See - Match, - /// REGEXP operator, e.g. `a REGEXP b` (SQLite-specific) - Regexp, /// Support for custom operators (such as Postgres custom operators) Custom(String), /// Bitwise XOR, e.g. `a # b` (PostgreSQL-specific) @@ -273,62 +245,6 @@ pub enum BinaryOperator { /// See [CREATE OPERATOR](https://www.postgresql.org/docs/current/sql-createoperator.html) /// for more information. PGCustomBinaryOperator(Vec), - /// The `OVERLAPS` operator - /// - /// Specifies a test for an overlap between two datetime periods: - /// - Overlaps, - /// `##` Point of closest proximity (PostgreSQL/Redshift geometric operator) - /// See - DoubleHash, - /// `<->` Distance between (PostgreSQL/Redshift geometric operator) - /// See - LtDashGt, - /// `&<` Overlaps to left? (PostgreSQL/Redshift geometric operator) - /// See - AndLt, - /// `&>` Overlaps to right? (PostgreSQL/Redshift geometric operator) - /// See - AndGt, - /// `<<|` Is strictly below? (PostgreSQL/Redshift geometric operator) - /// See - LtLtPipe, - /// `|>>` Is strictly above? (PostgreSQL/Redshift geometric operator) - /// See - PipeGtGt, - /// `&<|` Does not extend above? (PostgreSQL/Redshift geometric operator) - /// See - AndLtPipe, - /// `|&>` Does not extend below? (PostgreSQL/Redshift geometric operator) - /// See - PipeAndGt, - /// `<^` Is below? (PostgreSQL/Redshift geometric operator) - /// See - LtCaret, - /// `>^` Is above? (PostgreSQL/Redshift geometric operator) - /// See - GtCaret, - /// `?#` Intersects? 
(PostgreSQL/Redshift geometric operator) - /// See - QuestionHash, - /// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator) - /// See - QuestionDash, - /// `?-|` Is perpendicular? (PostgreSQL/Redshift geometric operator) - /// See - QuestionDashPipe, - /// `?||` Are Parallel? (PostgreSQL/Redshift geometric operator) - /// See - QuestionDoublePipe, - /// `@` Contained or on? (PostgreSQL/Redshift geometric operator) - /// See - At, - /// `~=` Same as? (PostgreSQL/Redshift geometric operator) - /// See - TildeEq, - /// ':=' Assignment Operator - /// See - Assignment, } impl fmt::Display for BinaryOperator { @@ -355,8 +271,6 @@ impl fmt::Display for BinaryOperator { BinaryOperator::BitwiseXor => f.write_str("^"), BinaryOperator::DuckIntegerDivide => f.write_str("//"), BinaryOperator::MyIntegerDivide => f.write_str("DIV"), - BinaryOperator::Match => f.write_str("MATCH"), - BinaryOperator::Regexp => f.write_str("REGEXP"), BinaryOperator::Custom(s) => f.write_str(s), BinaryOperator::PGBitwiseXor => f.write_str("#"), BinaryOperator::PGBitwiseShiftLeft => f.write_str("<<"), @@ -387,24 +301,6 @@ impl fmt::Display for BinaryOperator { BinaryOperator::PGCustomBinaryOperator(idents) => { write!(f, "OPERATOR({})", display_separated(idents, ".")) } - BinaryOperator::Overlaps => f.write_str("OVERLAPS"), - BinaryOperator::DoubleHash => f.write_str("##"), - BinaryOperator::LtDashGt => f.write_str("<->"), - BinaryOperator::AndLt => f.write_str("&<"), - BinaryOperator::AndGt => f.write_str("&>"), - BinaryOperator::LtLtPipe => f.write_str("<<|"), - BinaryOperator::PipeGtGt => f.write_str("|>>"), - BinaryOperator::AndLtPipe => f.write_str("&<|"), - BinaryOperator::PipeAndGt => f.write_str("|&>"), - BinaryOperator::LtCaret => f.write_str("<^"), - BinaryOperator::GtCaret => f.write_str(">^"), - BinaryOperator::QuestionHash => f.write_str("?#"), - BinaryOperator::QuestionDash => f.write_str("?-"), - BinaryOperator::QuestionDashPipe => f.write_str("?-|"), - BinaryOperator::QuestionDoublePipe => f.write_str("?||"), - BinaryOperator::At => f.write_str("@"), - BinaryOperator::TildeEq => f.write_str("~="), - BinaryOperator::Assignment => f.write_str(":="), } } } diff --git a/src/ast/query.rs b/src/ast/query.rs index 7ffb64d9..7af47243 100644 --- a/src/ast/query.rs +++ b/src/ast/query.rs @@ -18,18 +18,13 @@ #[cfg(not(feature = "std"))] use alloc::{boxed::Box, vec::Vec}; -use helpers::attached_token::AttachedToken; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::{ - ast::*, - display_utils::{indented_list, SpaceOrNewline}, - tokenizer::{Token, TokenWithSpan}, -}; +use crate::ast::*; /// The most complete variant of a `SELECT` query expression, optionally /// including `WITH`, `UNION` / other set operations, and `ORDER BY`. @@ -44,8 +39,14 @@ pub struct Query { pub body: Box, /// ORDER BY pub order_by: Option, - /// `LIMIT ... OFFSET ... | LIMIT , ` - pub limit_clause: Option, + /// `LIMIT { | ALL }` + pub limit: Option, + + /// `LIMIT { } BY { ,,... 
} }` + pub limit_by: Vec, + + /// `OFFSET [ { ROW | ROWS } ]` + pub offset: Option, /// `FETCH { FIRST | NEXT } [ PERCENT ] { ROW | ROWS } | { ONLY | WITH TIES }` pub fetch: Option, /// `FOR { UPDATE | SHARE } [ OF table_name ] [ SKIP LOCKED | NOWAIT ]` @@ -63,49 +64,40 @@ pub struct Query { /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/select/format) /// (ClickHouse-specific) pub format_clause: Option, - - /// Pipe operator - pub pipe_operators: Vec, } impl fmt::Display for Query { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(ref with) = self.with { - with.fmt(f)?; - SpaceOrNewline.fmt(f)?; + write!(f, "{with} ")?; } - self.body.fmt(f)?; + write!(f, "{}", self.body)?; if let Some(ref order_by) = self.order_by { - f.write_str(" ")?; - order_by.fmt(f)?; + write!(f, " {order_by}")?; } - - if let Some(ref limit_clause) = self.limit_clause { - limit_clause.fmt(f)?; + if let Some(ref limit) = self.limit { + write!(f, " LIMIT {limit}")?; + } + if let Some(ref offset) = self.offset { + write!(f, " {offset}")?; + } + if !self.limit_by.is_empty() { + write!(f, " BY {}", display_separated(&self.limit_by, ", "))?; } if let Some(ref settings) = self.settings { - f.write_str(" SETTINGS ")?; - display_comma_separated(settings).fmt(f)?; + write!(f, " SETTINGS {}", display_comma_separated(settings))?; } if let Some(ref fetch) = self.fetch { - f.write_str(" ")?; - fetch.fmt(f)?; + write!(f, " {fetch}")?; } if !self.locks.is_empty() { - f.write_str(" ")?; - display_separated(&self.locks, " ").fmt(f)?; + write!(f, " {}", display_separated(&self.locks, " "))?; } if let Some(ref for_clause) = self.for_clause { - f.write_str(" ")?; - for_clause.fmt(f)?; + write!(f, " {}", for_clause)?; } if let Some(ref format) = self.format_clause { - f.write_str(" ")?; - format.fmt(f)?; - } - for pipe_operator in &self.pipe_operators { - f.write_str(" |> ")?; - pipe_operator.fmt(f)?; + write!(f, " {}", format)?; } Ok(()) } @@ -160,7 +152,6 @@ pub enum SetExpr { Values(Values), Insert(Statement), Update(Statement), - Delete(Statement), Table(Box
), } @@ -178,39 +169,28 @@ impl SetExpr { impl fmt::Display for SetExpr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - SetExpr::Select(s) => s.fmt(f), - SetExpr::Query(q) => { - f.write_str("(")?; - q.fmt(f)?; - f.write_str(")") - } - SetExpr::Values(v) => v.fmt(f), - SetExpr::Insert(v) => v.fmt(f), - SetExpr::Update(v) => v.fmt(f), - SetExpr::Delete(v) => v.fmt(f), - SetExpr::Table(t) => t.fmt(f), + SetExpr::Select(s) => write!(f, "{s}"), + SetExpr::Query(q) => write!(f, "({q})"), + SetExpr::Values(v) => write!(f, "{v}"), + SetExpr::Insert(v) => write!(f, "{v}"), + SetExpr::Update(v) => write!(f, "{v}"), + SetExpr::Table(t) => write!(f, "{t}"), SetExpr::SetOperation { left, right, op, set_quantifier, } => { - left.fmt(f)?; - SpaceOrNewline.fmt(f)?; - op.fmt(f)?; + write!(f, "{left} {op}")?; match set_quantifier { SetQuantifier::All | SetQuantifier::Distinct | SetQuantifier::ByName | SetQuantifier::AllByName - | SetQuantifier::DistinctByName => { - f.write_str(" ")?; - set_quantifier.fmt(f)?; - } - SetQuantifier::None => {} + | SetQuantifier::DistinctByName => write!(f, " {set_quantifier}")?, + SetQuantifier::None => write!(f, "{set_quantifier}")?, } - SpaceOrNewline.fmt(f)?; - right.fmt(f)?; + write!(f, " {right}")?; Ok(()) } } @@ -224,7 +204,6 @@ pub enum SetOperator { Union, Except, Intersect, - Minus, } impl fmt::Display for SetOperator { @@ -233,7 +212,6 @@ impl fmt::Display for SetOperator { SetOperator::Union => "UNION", SetOperator::Except => "EXCEPT", SetOperator::Intersect => "INTERSECT", - SetOperator::Minus => "MINUS", }) } } @@ -261,7 +239,7 @@ impl fmt::Display for SetQuantifier { SetQuantifier::ByName => write!(f, "BY NAME"), SetQuantifier::AllByName => write!(f, "ALL BY NAME"), SetQuantifier::DistinctByName => write!(f, "DISTINCT BY NAME"), - SetQuantifier::None => Ok(()), + SetQuantifier::None => write!(f, ""), } } } @@ -291,19 +269,6 @@ impl fmt::Display for Table { } } -/// What did this select look like? -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SelectFlavor { - /// `SELECT *` - Standard, - /// `FROM ... SELECT *` - FromFirst, - /// `FROM *` - FromFirstNoSelect, -} - /// A restricted variant of `SELECT` (without CTEs/`ORDER BY`), which may /// appear either as the only body item of a `Query`, or as an operand /// to a set operation like `UNION`. @@ -311,9 +276,6 @@ pub enum SelectFlavor { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Select { - /// Token for the `SELECT` keyword - pub select_token: AttachedToken, - /// `SELECT [DISTINCT] ...` pub distinct: Option, /// MSSQL syntax: `TOP () [ PERCENT ] [ WITH TIES ]` pub top: Option, @@ -321,11 +283,6 @@ pub struct Select { pub top_before_distinct: bool, /// projection expressions pub projection: Vec, - /// Excluded columns from the projection expression which are not specified - /// directly after a wildcard. - /// - /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html) - pub exclude: Option, /// INTO pub into: Option, /// FROM @@ -346,7 +303,7 @@ pub struct Select { /// DISTRIBUTE BY (Hive) pub distribute_by: Vec, /// SORT BY (Hive) - pub sort_by: Vec, + pub sort_by: Vec, /// HAVING pub having: Option, /// WINDOW AS @@ -362,145 +319,95 @@ pub struct Select { pub value_table_mode: Option, /// STARTING WITH .. 
CONNECT BY pub connect_by: Option, - /// Was this a FROM-first query? - pub flavor: SelectFlavor, } impl fmt::Display for Select { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.flavor { - SelectFlavor::Standard => { - write!(f, "SELECT")?; - } - SelectFlavor::FromFirst => { - write!(f, "FROM {} SELECT", display_comma_separated(&self.from))?; - } - SelectFlavor::FromFirstNoSelect => { - write!(f, "FROM {}", display_comma_separated(&self.from))?; - } - } + write!(f, "SELECT")?; if let Some(value_table_mode) = self.value_table_mode { - f.write_str(" ")?; - value_table_mode.fmt(f)?; + write!(f, " {value_table_mode}")?; } if let Some(ref top) = self.top { if self.top_before_distinct { - f.write_str(" ")?; - top.fmt(f)?; + write!(f, " {top}")?; } } if let Some(ref distinct) = self.distinct { - f.write_str(" ")?; - distinct.fmt(f)?; + write!(f, " {distinct}")?; } if let Some(ref top) = self.top { if !self.top_before_distinct { - f.write_str(" ")?; - top.fmt(f)?; + write!(f, " {top}")?; } } - if !self.projection.is_empty() { - indented_list(f, &self.projection)?; - } - - if let Some(exclude) = &self.exclude { - write!(f, " {exclude}")?; - } + write!(f, " {}", display_comma_separated(&self.projection))?; if let Some(ref into) = self.into { - f.write_str(" ")?; - into.fmt(f)?; + write!(f, " {into}")?; } - if self.flavor == SelectFlavor::Standard && !self.from.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("FROM")?; - indented_list(f, &self.from)?; + if !self.from.is_empty() { + write!(f, " FROM {}", display_comma_separated(&self.from))?; } if !self.lateral_views.is_empty() { for lv in &self.lateral_views { - lv.fmt(f)?; + write!(f, "{lv}")?; } } if let Some(ref prewhere) = self.prewhere { - f.write_str(" PREWHERE ")?; - prewhere.fmt(f)?; + write!(f, " PREWHERE {prewhere}")?; } if let Some(ref selection) = self.selection { - SpaceOrNewline.fmt(f)?; - f.write_str("WHERE")?; - SpaceOrNewline.fmt(f)?; - Indent(selection).fmt(f)?; + write!(f, " WHERE {selection}")?; } match &self.group_by { - GroupByExpr::All(_) => { - SpaceOrNewline.fmt(f)?; - self.group_by.fmt(f)?; - } + GroupByExpr::All(_) => write!(f, " {}", self.group_by)?, GroupByExpr::Expressions(exprs, _) => { if !exprs.is_empty() { - SpaceOrNewline.fmt(f)?; - self.group_by.fmt(f)?; + write!(f, " {}", self.group_by)? 
} } } if !self.cluster_by.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("CLUSTER BY")?; - SpaceOrNewline.fmt(f)?; - Indent(display_comma_separated(&self.cluster_by)).fmt(f)?; + write!( + f, + " CLUSTER BY {}", + display_comma_separated(&self.cluster_by) + )?; } if !self.distribute_by.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("DISTRIBUTE BY")?; - SpaceOrNewline.fmt(f)?; - display_comma_separated(&self.distribute_by).fmt(f)?; + write!( + f, + " DISTRIBUTE BY {}", + display_comma_separated(&self.distribute_by) + )?; } if !self.sort_by.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("SORT BY")?; - SpaceOrNewline.fmt(f)?; - Indent(display_comma_separated(&self.sort_by)).fmt(f)?; + write!(f, " SORT BY {}", display_comma_separated(&self.sort_by))?; } if let Some(ref having) = self.having { - SpaceOrNewline.fmt(f)?; - f.write_str("HAVING")?; - SpaceOrNewline.fmt(f)?; - Indent(having).fmt(f)?; + write!(f, " HAVING {having}")?; } if self.window_before_qualify { if !self.named_window.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("WINDOW")?; - SpaceOrNewline.fmt(f)?; - display_comma_separated(&self.named_window).fmt(f)?; + write!(f, " WINDOW {}", display_comma_separated(&self.named_window))?; } if let Some(ref qualify) = self.qualify { - SpaceOrNewline.fmt(f)?; - f.write_str("QUALIFY")?; - SpaceOrNewline.fmt(f)?; - qualify.fmt(f)?; + write!(f, " QUALIFY {qualify}")?; } } else { if let Some(ref qualify) = self.qualify { - SpaceOrNewline.fmt(f)?; - f.write_str("QUALIFY")?; - SpaceOrNewline.fmt(f)?; - qualify.fmt(f)?; + write!(f, " QUALIFY {qualify}")?; } if !self.named_window.is_empty() { - SpaceOrNewline.fmt(f)?; - f.write_str("WINDOW")?; - SpaceOrNewline.fmt(f)?; - display_comma_separated(&self.named_window).fmt(f)?; + write!(f, " WINDOW {}", display_comma_separated(&self.named_window))?; } } if let Some(ref connect_by) = self.connect_by { - SpaceOrNewline.fmt(f)?; - connect_by.fmt(f)?; + write!(f, " {connect_by}")?; } Ok(()) } @@ -598,20 +505,18 @@ impl fmt::Display for NamedWindowDefinition { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct With { - /// Token for the "WITH" keyword - pub with_token: AttachedToken, pub recursive: bool, pub cte_tables: Vec, } impl fmt::Display for With { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("WITH ")?; - if self.recursive { - f.write_str("RECURSIVE ")?; - } - display_comma_separated(&self.cte_tables).fmt(f)?; - Ok(()) + write!( + f, + "WITH {}{}", + if self.recursive { "RECURSIVE " } else { "" }, + display_comma_separated(&self.cte_tables) + ) } } @@ -651,31 +556,13 @@ pub struct Cte { pub query: Box, pub from: Option, pub materialized: Option, - /// Token for the closing parenthesis - pub closing_paren_token: AttachedToken, } impl fmt::Display for Cte { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.materialized.as_ref() { - None => { - self.alias.fmt(f)?; - f.write_str(" AS (")?; - NewLine.fmt(f)?; - Indent(&self.query).fmt(f)?; - NewLine.fmt(f)?; - f.write_str(")")?; - } - Some(materialized) => { - self.alias.fmt(f)?; - f.write_str(" AS ")?; - materialized.fmt(f)?; - f.write_str(" (")?; - NewLine.fmt(f)?; - Indent(&self.query).fmt(f)?; - NewLine.fmt(f)?; - f.write_str(")")?; - } + None => write!(f, "{} AS ({})", self.alias, self.query)?, + Some(materialized) => write!(f, "{} AS {materialized} ({})", self.alias, self.query)?, }; if let Some(ref fr) = self.from { write!(f, " FROM {fr}")?; @@ -684,20 +571,6 
@@ impl fmt::Display for Cte { } } -/// Represents an expression behind a wildcard expansion in a projection. -/// `SELECT T.* FROM T; -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum SelectItemQualifiedWildcardKind { - /// Expression is an object name. - /// e.g. `alias.*` or even `schema.table.*` - ObjectName(ObjectName), - /// Select star on an arbitrary expression. - /// e.g. `STRUCT('foo').*` - Expr(Expr), -} - /// One item of the comma-separated list following `SELECT` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -707,24 +580,12 @@ pub enum SelectItem { UnnamedExpr(Expr), /// An expression, followed by `[ AS ] alias` ExprWithAlias { expr: Expr, alias: Ident }, - /// An expression, followed by a wildcard expansion. - /// e.g. `alias.*`, `STRUCT('foo').*` - QualifiedWildcard(SelectItemQualifiedWildcardKind, WildcardAdditionalOptions), + /// `alias.*` or even `schema.table.*` + QualifiedWildcard(ObjectName, WildcardAdditionalOptions), /// An unqualified `*` Wildcard(WildcardAdditionalOptions), } -impl fmt::Display for SelectItemQualifiedWildcardKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match &self { - SelectItemQualifiedWildcardKind::ObjectName(object_name) => { - write!(f, "{object_name}.*") - } - SelectItemQualifiedWildcardKind::Expr(expr) => write!(f, "{expr}.*"), - } - } -} - /// Single aliased identifier /// /// # Syntax @@ -746,12 +607,10 @@ impl fmt::Display for IdentWithAlias { } /// Additional options for wildcards, e.g. Snowflake `EXCLUDE`/`RENAME` and Bigquery `EXCEPT`. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct WildcardAdditionalOptions { - /// The wildcard token `*` - pub wildcard_token: AttachedToken, /// `[ILIKE...]`. 
/// Snowflake syntax: pub opt_ilike: Option, @@ -769,19 +628,6 @@ pub struct WildcardAdditionalOptions { pub opt_rename: Option, } -impl Default for WildcardAdditionalOptions { - fn default() -> Self { - Self { - wildcard_token: TokenWithSpan::wrap(Token::Mul).into(), - opt_ilike: None, - opt_exclude: None, - opt_except: None, - opt_replace: None, - opt_rename: None, - } - } -} - impl fmt::Display for WildcardAdditionalOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(ilike) = &self.opt_ilike { @@ -988,21 +834,18 @@ impl fmt::Display for ReplaceSelectElement { impl fmt::Display for SelectItem { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use core::fmt::Write; match &self { - SelectItem::UnnamedExpr(expr) => expr.fmt(f), - SelectItem::ExprWithAlias { expr, alias } => { - expr.fmt(f)?; - f.write_str(" AS ")?; - alias.fmt(f) - } - SelectItem::QualifiedWildcard(kind, additional_options) => { - kind.fmt(f)?; - additional_options.fmt(f) + SelectItem::UnnamedExpr(expr) => write!(f, "{expr}"), + SelectItem::ExprWithAlias { expr, alias } => write!(f, "{expr} AS {alias}"), + SelectItem::QualifiedWildcard(prefix, additional_options) => { + write!(f, "{prefix}.*")?; + write!(f, "{additional_options}")?; + Ok(()) } SelectItem::Wildcard(additional_options) => { - f.write_char('*')?; - additional_options.fmt(f) + write!(f, "*")?; + write!(f, "{additional_options}")?; + Ok(()) } } } @@ -1018,10 +861,9 @@ pub struct TableWithJoins { impl fmt::Display for TableWithJoins { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.relation.fmt(f)?; + write!(f, "{}", self.relation)?; for join in &self.joins { - SpaceOrNewline.fmt(f)?; - join.fmt(f)?; + write!(f, "{join}")?; } Ok(()) } @@ -1056,7 +898,7 @@ impl fmt::Display for ConnectBy { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Setting { pub key: Ident, - pub value: Expr, + pub value: Value, } impl fmt::Display for Setting { @@ -1090,26 +932,6 @@ impl fmt::Display for ExprWithAlias { } } -/// An expression optionally followed by an alias and order by options. 
-/// -/// Example: -/// ```sql -/// 42 AS myint ASC -/// ``` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ExprWithAliasAndOrderBy { - pub expr: ExprWithAlias, - pub order_by: OrderByOptions, -} - -impl fmt::Display for ExprWithAliasAndOrderBy { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}{}", self.expr, self.order_by) - } -} - /// Arguments to a table-valued function #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -1123,81 +945,6 @@ pub struct TableFunctionArgs { pub settings: Option>, } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableIndexHintType { - Use, - Ignore, - Force, -} - -impl fmt::Display for TableIndexHintType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - TableIndexHintType::Use => "USE", - TableIndexHintType::Ignore => "IGNORE", - TableIndexHintType::Force => "FORCE", - }) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableIndexType { - Index, - Key, -} - -impl fmt::Display for TableIndexType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - TableIndexType::Index => "INDEX", - TableIndexType::Key => "KEY", - }) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableIndexHintForClause { - Join, - OrderBy, - GroupBy, -} - -impl fmt::Display for TableIndexHintForClause { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(match self { - TableIndexHintForClause::Join => "JOIN", - TableIndexHintForClause::OrderBy => "ORDER BY", - TableIndexHintForClause::GroupBy => "GROUP BY", - }) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableIndexHints { - pub hint_type: TableIndexHintType, - pub index_type: TableIndexType, - pub for_clause: Option, - pub index_names: Vec, -} - -impl fmt::Display for TableIndexHints { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} {} ", self.hint_type, self.index_type)?; - if let Some(for_clause) = &self.for_clause { - write!(f, "FOR {for_clause} ")?; - } - write!(f, "({})", display_comma_separated(&self.index_names)) - } -} - /// A table name or a parenthesized subquery with an optional alias #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -1227,14 +974,6 @@ pub enum TableFactor { with_ordinality: bool, /// [Partition selection](https://dev.mysql.com/doc/refman/8.0/en/partitioning-selection.html), supported by MySQL. 
partitions: Vec, - /// Optional PartiQL JsonPath: - json_path: Option, - /// Optional table sample modifier - /// See: - sample: Option, - /// Optional index hints(mysql) - /// See: - index_hints: Vec, }, Derived { lateral: bool, @@ -1297,27 +1036,6 @@ pub enum TableFactor { /// The alias for the table. alias: Option, }, - /// The MSSQL's `OPENJSON` table-valued function. - /// - /// ```sql - /// OPENJSON( jsonExpression [ , path ] ) [ ] - /// - /// ::= WITH ( { colName type [ column_path ] [ AS JSON ] } [ ,...n ] ) - /// ```` - /// - /// Reference: - OpenJsonTable { - /// The JSON expression to be evaluated. It must evaluate to a json string - json_expr: Expr, - /// The path to the array or object to be iterated over. - /// It must evaluate to a json array or object. - json_path: Option, - /// The columns to be extracted from each element of the array or object. - /// Each column must have a name and a type. - columns: Vec, - /// The alias for the table. - alias: Option, - }, /// Represents a parenthesized table factor. The SQL spec only allows a /// join expression (`(foo bar [ baz ... ])`) to be nested, /// possibly several times. @@ -1345,7 +1063,7 @@ pub enum TableFactor { /// /// Syntax: /// ```sql - /// table UNPIVOT [ { INCLUDE | EXCLUDE } NULLS ] (value FOR name IN (column1, [ column2, ... ])) [ alias ] + /// table UNPIVOT(value FOR name IN (column1, [ column2, ... ])) [ alias ] /// ``` /// /// See . @@ -1354,7 +1072,6 @@ pub enum TableFactor { value: Ident, name: Ident, columns: Vec, - null_inclusion: Option, alias: Option, }, /// A `MATCH_RECOGNIZE` operation on a table. @@ -1378,214 +1095,6 @@ pub enum TableFactor { symbols: Vec, alias: Option, }, - /// The `XMLTABLE` table-valued function. - /// Part of the SQL standard, supported by PostgreSQL, Oracle, and DB2. - /// - /// - /// - /// ```sql - /// SELECT xmltable.* - /// FROM xmldata, - /// XMLTABLE('//ROWS/ROW' - /// PASSING data - /// COLUMNS id int PATH '@id', - /// ordinality FOR ORDINALITY, - /// "COUNTRY_NAME" text, - /// country_id text PATH 'COUNTRY_ID', - /// size_sq_km float PATH 'SIZE[@unit = "sq_km"]', - /// size_other text PATH 'concat(SIZE[@unit!="sq_km"], " ", SIZE[@unit!="sq_km"]/@unit)', - /// premier_name text PATH 'PREMIER_NAME' DEFAULT 'not specified' - /// ); - /// ```` - XmlTable { - /// Optional XMLNAMESPACES clause (empty if not present) - namespaces: Vec, - /// The row-generating XPath expression. - row_expression: Expr, - /// The PASSING clause specifying the document expression. - passing: XmlPassingClause, - /// The columns to be extracted from each generated row. - columns: Vec, - /// The alias for the table. 
- alias: Option, - }, -} - -/// The table sample modifier options -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableSampleKind { - /// Table sample located before the table alias option - BeforeTableAlias(Box), - /// Table sample located after the table alias option - AfterTableAlias(Box), -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableSample { - pub modifier: TableSampleModifier, - pub name: Option, - pub quantity: Option, - pub seed: Option, - pub bucket: Option, - pub offset: Option, -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableSampleModifier { - Sample, - TableSample, -} - -impl fmt::Display for TableSampleModifier { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableSampleModifier::Sample => write!(f, "SAMPLE")?, - TableSampleModifier::TableSample => write!(f, "TABLESAMPLE")?, - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableSampleQuantity { - pub parenthesized: bool, - pub value: Expr, - pub unit: Option, -} - -impl fmt::Display for TableSampleQuantity { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.parenthesized { - write!(f, "(")?; - } - write!(f, "{}", self.value)?; - if let Some(unit) = &self.unit { - write!(f, " {unit}")?; - } - if self.parenthesized { - write!(f, ")")?; - } - Ok(()) - } -} - -/// The table sample method names -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableSampleMethod { - Row, - Bernoulli, - System, - Block, -} - -impl fmt::Display for TableSampleMethod { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableSampleMethod::Bernoulli => write!(f, "BERNOULLI"), - TableSampleMethod::Row => write!(f, "ROW"), - TableSampleMethod::System => write!(f, "SYSTEM"), - TableSampleMethod::Block => write!(f, "BLOCK"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableSampleSeed { - pub modifier: TableSampleSeedModifier, - pub value: Value, -} - -impl fmt::Display for TableSampleSeed { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{} ({})", self.modifier, self.value)?; - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableSampleSeedModifier { - Repeatable, - Seed, -} - -impl fmt::Display for TableSampleSeedModifier { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableSampleSeedModifier::Repeatable => write!(f, "REPEATABLE"), - TableSampleSeedModifier::Seed => write!(f, "SEED"), - } - } -} - -#[derive(Debug, Clone, 
PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableSampleUnit { - Rows, - Percent, -} - -impl fmt::Display for TableSampleUnit { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TableSampleUnit::Percent => write!(f, "PERCENT"), - TableSampleUnit::Rows => write!(f, "ROWS"), - } - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableSampleBucket { - pub bucket: Value, - pub total: Value, - pub on: Option, -} - -impl fmt::Display for TableSampleBucket { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?; - if let Some(on) = &self.on { - write!(f, " ON {on}")?; - } - Ok(()) - } -} -impl fmt::Display for TableSample { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.modifier)?; - if let Some(name) = &self.name { - write!(f, " {name}")?; - } - if let Some(quantity) = &self.quantity { - write!(f, " {quantity}")?; - } - if let Some(seed) = &self.seed { - write!(f, " {seed}")?; - } - if let Some(bucket) = &self.bucket { - write!(f, " ({bucket})")?; - } - if let Some(offset) = &self.offset { - write!(f, " OFFSET {offset}")?; - } - Ok(()) - } } /// The source of values in a `PIVOT` operation. @@ -1660,7 +1169,7 @@ impl fmt::Display for RowsPerMatch { RowsPerMatch::AllRows(mode) => { write!(f, "ALL ROWS PER MATCH")?; if let Some(mode) = mode { - write!(f, " {mode}")?; + write!(f, " {}", mode)?; } Ok(()) } @@ -1786,7 +1295,7 @@ impl fmt::Display for MatchRecognizePattern { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use MatchRecognizePattern::*; match self { - Symbol(symbol) => write!(f, "{symbol}"), + Symbol(symbol) => write!(f, "{}", symbol), Exclude(symbol) => write!(f, "{{- {symbol} -}}"), Permute(symbols) => write!(f, "PERMUTE({})", display_comma_separated(symbols)), Concat(patterns) => write!(f, "{}", display_separated(patterns, " ")), @@ -1845,14 +1354,8 @@ impl fmt::Display for TableFactor { version, partitions, with_ordinality, - json_path, - sample, - index_hints, } => { - name.fmt(f)?; - if let Some(json_path) = json_path { - json_path.fmt(f)?; - } + write!(f, "{name}")?; if !partitions.is_empty() { write!(f, "PARTITION ({})", display_comma_separated(partitions))?; } @@ -1870,24 +1373,15 @@ impl fmt::Display for TableFactor { if *with_ordinality { write!(f, " WITH ORDINALITY")?; } - if let Some(TableSampleKind::BeforeTableAlias(sample)) = sample { - write!(f, " {sample}")?; - } if let Some(alias) = alias { write!(f, " AS {alias}")?; } - if !index_hints.is_empty() { - write!(f, " {}", display_separated(index_hints, " "))?; - } if !with_hints.is_empty() { write!(f, " WITH ({})", display_comma_separated(with_hints))?; } if let Some(version) = version { write!(f, "{version}")?; } - if let Some(TableSampleKind::AfterTableAlias(sample)) = sample { - write!(f, " {sample}")?; - } Ok(()) } TableFactor::Derived { @@ -1898,11 +1392,7 @@ impl fmt::Display for TableFactor { if *lateral { write!(f, "LATERAL ")?; } - f.write_str("(")?; - NewLine.fmt(f)?; - Indent(subquery).fmt(f)?; - NewLine.fmt(f)?; - f.write_str(")")?; + write!(f, "({subquery})")?; if let Some(alias) = alias { write!(f, " AS {alias}")?; } @@ -1971,25 +1461,6 @@ impl fmt::Display for TableFactor { } Ok(()) } - 
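The `TableSampleKind` / `TableSample` types above model the `SAMPLE` / `TABLESAMPLE` clause that may appear before or after the table alias. A minimal sketch of how such a clause surfaces in the parsed AST, assuming a sqlparser release that includes the `sample` field on `TableFactor::Table` shown in this diff; the SQL string, table name, and dialect choice are illustrative only:

```rust
use sqlparser::ast::{SetExpr, Statement, TableFactor};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // Illustrative query; the accepted sampling syntax varies by dialect.
    let sql = "SELECT * FROM orders TABLESAMPLE BERNOULLI (10)";
    let stmts = Parser::parse_sql(&GenericDialect {}, sql).expect("parse failed");

    if let Statement::Query(query) = &stmts[0] {
        if let SetExpr::Select(select) = query.body.as_ref() {
            if let TableFactor::Table { sample, .. } = &select.from[0].relation {
                // `sample` is an Option<TableSampleKind>, distinguishing a
                // clause placed before vs. after the table alias.
                println!("table sample: {sample:?}");
            }
        }
    }
}
```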
TableFactor::OpenJsonTable { - json_expr, - json_path, - columns, - alias, - } => { - write!(f, "OPENJSON({json_expr}")?; - if let Some(json_path) = json_path { - write!(f, ", {json_path}")?; - } - write!(f, ")")?; - if !columns.is_empty() { - write!(f, " WITH ({})", display_comma_separated(columns))?; - } - if let Some(alias) = alias { - write!(f, " AS {alias}")?; - } - Ok(()) - } TableFactor::NestedJoin { table_with_joins, alias, @@ -2025,19 +1496,15 @@ impl fmt::Display for TableFactor { } TableFactor::Unpivot { table, - null_inclusion, value, name, columns, alias, } => { - write!(f, "{table} UNPIVOT")?; - if let Some(null_inclusion) = null_inclusion { - write!(f, " {null_inclusion} ")?; - } write!( f, - "({} FOR {} IN ({}))", + "{} UNPIVOT({} FOR {} IN ({}))", + table, value, name, display_comma_separated(columns) @@ -2081,31 +1548,6 @@ impl fmt::Display for TableFactor { } Ok(()) } - TableFactor::XmlTable { - row_expression, - passing, - columns, - alias, - namespaces, - } => { - write!(f, "XMLTABLE(")?; - if !namespaces.is_empty() { - write!( - f, - "XMLNAMESPACES({}), ", - display_comma_separated(namespaces) - )?; - } - write!( - f, - "{row_expression}{passing} COLUMNS {columns})", - columns = display_comma_separated(columns) - )?; - if let Some(alias) = alias { - write!(f, " AS {alias}")?; - } - Ok(()) - } } } } @@ -2115,7 +1557,7 @@ impl fmt::Display for TableFactor { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct TableAlias { pub name: Ident, - pub columns: Vec, + pub columns: Vec, } impl fmt::Display for TableAlias { @@ -2128,58 +1570,17 @@ impl fmt::Display for TableAlias { } } -/// SQL column definition in a table expression alias. -/// Most of the time, the data type is not specified. -/// But some table-valued functions do require specifying the data type. -/// -/// See -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableAliasColumnDef { - /// Column name alias - pub name: Ident, - /// Some table-valued functions require specifying the data type in the alias. - pub data_type: Option, -} - -impl TableAliasColumnDef { - /// Create a new table alias column definition with only a name and no type - pub fn from_name>(name: S) -> Self { - TableAliasColumnDef { - name: Ident::new(name), - data_type: None, - } - } -} - -impl fmt::Display for TableAliasColumnDef { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.name)?; - if let Some(ref data_type) = self.data_type { - write!(f, " {data_type}")?; - } - Ok(()) - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum TableVersion { - /// When the table version is defined using `FOR SYSTEM_TIME AS OF`. - /// For example: `SELECT * FROM tbl FOR SYSTEM_TIME AS OF TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 HOUR)` ForSystemTimeAsOf(Expr), - /// When the table version is defined using a function. 
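For the MSSQL `OPENJSON` table factor above, a hedged parsing sketch with `MsSqlDialect`; the JSON variable, path, and column list are invented, and whether this parses depends on using a sqlparser release that includes `TableFactor::OpenJsonTable`:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Hypothetical OPENJSON query, loosely following the doc comment's grammar.
    let sql = "SELECT * FROM OPENJSON(@json, '$.items') \
               WITH (id INT '$.id', name VARCHAR(50) '$.name') AS items";
    match Parser::parse_sql(&MsSqlDialect {}, sql) {
        // The Display impl above writes the factor back as OPENJSON(...) WITH (...).
        Ok(stmts) => println!("{}", stmts[0]),
        Err(e) => eprintln!("parse error: {e}"),
    }
}
```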
- /// For example: `SELECT * FROM tbl AT(TIMESTAMP => '2020-08-14 09:30:00')` - Function(Expr), } impl Display for TableVersion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { TableVersion::ForSystemTimeAsOf(e) => write!(f, " FOR SYSTEM_TIME AS OF {e}")?, - TableVersion::Function(func) => write!(f, " {func}")?, } Ok(()) } @@ -2206,7 +1607,7 @@ impl fmt::Display for Join { } fn suffix(constraint: &'_ JoinConstraint) -> impl fmt::Display + '_ { struct Suffix<'a>(&'a JoinConstraint); - impl fmt::Display for Suffix<'_> { + impl<'a> fmt::Display for Suffix<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.0 { JoinConstraint::On(expr) => write!(f, " ON {expr}"), @@ -2220,104 +1621,78 @@ impl fmt::Display for Join { Suffix(constraint) } if self.global { - write!(f, "GLOBAL ")?; + write!(f, " GLOBAL")?; } match &self.join_operator { - JoinOperator::Join(constraint) => f.write_fmt(format_args!( - "{}JOIN {}{}", + JoinOperator::Inner(constraint) => write!( + f, + " {}JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::Inner(constraint) => f.write_fmt(format_args!( - "{}INNER JOIN {}{}", + ), + JoinOperator::LeftOuter(constraint) => write!( + f, + " {}LEFT JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::Left(constraint) => f.write_fmt(format_args!( - "{}LEFT JOIN {}{}", + ), + JoinOperator::RightOuter(constraint) => write!( + f, + " {}RIGHT JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::LeftOuter(constraint) => f.write_fmt(format_args!( - "{}LEFT OUTER JOIN {}{}", + ), + JoinOperator::FullOuter(constraint) => write!( + f, + " {}FULL JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::Right(constraint) => f.write_fmt(format_args!( - "{}RIGHT JOIN {}{}", + ), + JoinOperator::CrossJoin => write!(f, " CROSS JOIN {}", self.relation), + JoinOperator::LeftSemi(constraint) => write!( + f, + " {}LEFT SEMI JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::RightOuter(constraint) => f.write_fmt(format_args!( - "{}RIGHT OUTER JOIN {}{}", + ), + JoinOperator::RightSemi(constraint) => write!( + f, + " {}RIGHT SEMI JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::FullOuter(constraint) => f.write_fmt(format_args!( - "{}FULL JOIN {}{}", + ), + JoinOperator::LeftAnti(constraint) => write!( + f, + " {}LEFT ANTI JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::CrossJoin => f.write_fmt(format_args!("CROSS JOIN {}", self.relation)), - JoinOperator::Semi(constraint) => f.write_fmt(format_args!( - "{}SEMI JOIN {}{}", + ), + JoinOperator::RightAnti(constraint) => write!( + f, + " {}RIGHT ANTI JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) - )), - JoinOperator::LeftSemi(constraint) => f.write_fmt(format_args!( - "{}LEFT SEMI JOIN {}{}", - prefix(constraint), - self.relation, - suffix(constraint) - )), - JoinOperator::RightSemi(constraint) => f.write_fmt(format_args!( - "{}RIGHT SEMI JOIN {}{}", - prefix(constraint), - self.relation, - suffix(constraint) - )), - JoinOperator::Anti(constraint) => f.write_fmt(format_args!( - "{}ANTI JOIN {}{}", - prefix(constraint), - self.relation, - suffix(constraint) - )), - JoinOperator::LeftAnti(constraint) => f.write_fmt(format_args!( - "{}LEFT ANTI JOIN {}{}", - prefix(constraint), - self.relation, - suffix(constraint) - )), - JoinOperator::RightAnti(constraint) 
=> f.write_fmt(format_args!( - "{}RIGHT ANTI JOIN {}{}", - prefix(constraint), - self.relation, - suffix(constraint) - )), - JoinOperator::CrossApply => f.write_fmt(format_args!("CROSS APPLY {}", self.relation)), - JoinOperator::OuterApply => f.write_fmt(format_args!("OUTER APPLY {}", self.relation)), + ), + JoinOperator::CrossApply => write!(f, " CROSS APPLY {}", self.relation), + JoinOperator::OuterApply => write!(f, " OUTER APPLY {}", self.relation), JoinOperator::AsOf { match_condition, constraint, - } => f.write_fmt(format_args!( - "ASOF JOIN {} MATCH_CONDITION ({match_condition}){}", + } => write!( + f, + " ASOF JOIN {} MATCH_CONDITION ({match_condition}){}", self.relation, suffix(constraint) - )), - JoinOperator::StraightJoin(constraint) => f.write_fmt(format_args!( - "STRAIGHT_JOIN {}{}", - self.relation, - suffix(constraint) - )), + ), } } } @@ -2326,22 +1701,15 @@ impl fmt::Display for Join { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum JoinOperator { - Join(JoinConstraint), Inner(JoinConstraint), - Left(JoinConstraint), LeftOuter(JoinConstraint), - Right(JoinConstraint), RightOuter(JoinConstraint), FullOuter(JoinConstraint), CrossJoin, - /// SEMI (non-standard) - Semi(JoinConstraint), /// LEFT SEMI (non-standard) LeftSemi(JoinConstraint), /// RIGHT SEMI (non-standard) RightSemi(JoinConstraint), - /// ANTI (non-standard) - Anti(JoinConstraint), /// LEFT ANTI (non-standard) LeftAnti(JoinConstraint), /// RIGHT ANTI (non-standard) @@ -2358,10 +1726,6 @@ pub enum JoinOperator { match_condition: Expr, constraint: JoinConstraint, }, - /// STRAIGHT_JOIN (non-standard) - /// - /// See . - StraightJoin(JoinConstraint), } #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -2369,55 +1733,35 @@ pub enum JoinOperator { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum JoinConstraint { On(Expr), - Using(Vec), + Using(Vec), Natural, None, } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum OrderByKind { - /// ALL syntax of [DuckDB] and [ClickHouse]. 
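Among the non-standard `JoinOperator` variants above (SEMI/ANTI, `STRAIGHT_JOIN`, `AsOf`), Snowflake's `ASOF JOIN` carries an extra `MATCH_CONDITION`. A hedged sketch, assuming `SnowflakeDialect` and a release that includes `JoinOperator::AsOf`; table and column names are invented:

```rust
use sqlparser::ast::{SetExpr, Statement};
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Invented example of Snowflake's ASOF JOIN ... MATCH_CONDITION syntax.
    let sql = "SELECT * FROM trades t ASOF JOIN quotes q \
               MATCH_CONDITION (t.ts >= q.ts) ON t.sym = q.sym";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("parse failed");

    if let Statement::Query(query) = &stmts[0] {
        if let SetExpr::Select(select) = query.body.as_ref() {
            // The join operator (JoinOperator::AsOf { .. } here) hangs off the
            // joins of the first FROM relation.
            println!("{:?}", select.from[0].joins[0].join_operator);
        }
    }
}
```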
- /// - /// [DuckDB]: - /// [ClickHouse]: - All(OrderByOptions), - - /// Expressions - Expressions(Vec), -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct OrderBy { - pub kind: OrderByKind, - + pub exprs: Vec, /// Optional: `INTERPOLATE` /// Supported by [ClickHouse syntax] + /// + /// [ClickHouse syntax]: pub interpolate: Option, } impl fmt::Display for OrderBy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ORDER BY")?; - match &self.kind { - OrderByKind::Expressions(exprs) => { - write!(f, " {}", display_comma_separated(exprs))?; - } - OrderByKind::All(all) => { - write!(f, " ALL{all}")?; - } + if !self.exprs.is_empty() { + write!(f, " {}", display_comma_separated(&self.exprs))?; } - if let Some(ref interpolate) = self.interpolate { match &interpolate.exprs { Some(exprs) => write!(f, " INTERPOLATE ({})", display_comma_separated(exprs))?, None => write!(f, " INTERPOLATE")?, } } - Ok(()) } } @@ -2428,7 +1772,10 @@ impl fmt::Display for OrderBy { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct OrderByExpr { pub expr: Expr, - pub options: OrderByOptions, + /// Optional `ASC` or `DESC` + pub asc: Option, + /// Optional `NULLS FIRST` or `NULLS LAST` + pub nulls_first: Option, /// Optional: `WITH FILL` /// Supported by [ClickHouse syntax]: pub with_fill: Option, @@ -2436,9 +1783,19 @@ pub struct OrderByExpr { impl fmt::Display for OrderByExpr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}{}", self.expr, self.options)?; + write!(f, "{}", self.expr)?; + match self.asc { + Some(true) => write!(f, " ASC")?, + Some(false) => write!(f, " DESC")?, + None => (), + } + match self.nulls_first { + Some(true) => write!(f, " NULLS FIRST")?, + Some(false) => write!(f, " NULLS LAST")?, + None => (), + } if let Some(ref with_fill) = self.with_fill { - write!(f, " {with_fill}")? + write!(f, " {}", with_fill)? 
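`OrderByKind::All` covers the `ORDER BY ALL` shorthand from DuckDB and ClickHouse, while `OrderByKind::Expressions` is the ordinary expression list. A minimal sketch, assuming a release that includes `OrderByKind` and a dialect that accepts the shorthand (DuckDB shown; column names are illustrative):

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    // ORDER BY ALL sorts by every column of the projection.
    let sql = "SELECT city, country FROM t ORDER BY ALL DESC";
    let stmts = Parser::parse_sql(&DuckDbDialect {}, sql).expect("parse failed");
    // The Display impl above writes the ALL form back out unchanged.
    println!("{}", stmts[0]);
}
```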
} Ok(()) } @@ -2461,13 +1818,13 @@ impl fmt::Display for WithFill { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "WITH FILL")?; if let Some(ref from) = self.from { - write!(f, " FROM {from}")?; + write!(f, " FROM {}", from)?; } if let Some(ref to) = self.to { - write!(f, " TO {to}")?; + write!(f, " TO {}", to)?; } if let Some(ref step) = self.step { - write!(f, " STEP {step}")?; + write!(f, " STEP {}", step)?; } Ok(()) } @@ -2496,90 +1853,12 @@ impl fmt::Display for InterpolateExpr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.column)?; if let Some(ref expr) = self.expr { - write!(f, " AS {expr}")?; + write!(f, " AS {}", expr)?; } Ok(()) } } -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct OrderByOptions { - /// Optional `ASC` or `DESC` - pub asc: Option, - /// Optional `NULLS FIRST` or `NULLS LAST` - pub nulls_first: Option, -} - -impl fmt::Display for OrderByOptions { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.asc { - Some(true) => write!(f, " ASC")?, - Some(false) => write!(f, " DESC")?, - None => (), - } - match self.nulls_first { - Some(true) => write!(f, " NULLS FIRST")?, - Some(false) => write!(f, " NULLS LAST")?, - None => (), - } - Ok(()) - } -} - -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum LimitClause { - /// Standard SQL syntax - /// - /// `LIMIT [BY ,,...] [OFFSET ]` - LimitOffset { - /// `LIMIT { | ALL }` - limit: Option, - /// `OFFSET [ { ROW | ROWS } ]` - offset: Option, - /// `BY { ,,... } }` - /// - /// [ClickHouse](https://clickhouse.com/docs/sql-reference/statements/select/limit-by) - limit_by: Vec, - }, - /// [MySQL]-specific syntax; the order of expressions is reversed. - /// - /// `LIMIT , ` - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/select.html - OffsetCommaLimit { offset: Expr, limit: Expr }, -} - -impl fmt::Display for LimitClause { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - LimitClause::LimitOffset { - limit, - limit_by, - offset, - } => { - if let Some(ref limit) = limit { - write!(f, " LIMIT {limit}")?; - } - if let Some(ref offset) = offset { - write!(f, " {offset}")?; - } - if !limit_by.is_empty() { - debug_assert!(limit.is_some()); - write!(f, " BY {}", display_separated(limit_by, ", "))?; - } - Ok(()) - } - LimitClause::OffsetCommaLimit { offset, limit } => { - write!(f, " LIMIT {offset}, {limit}") - } - } - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -2615,296 +1894,6 @@ impl fmt::Display for OffsetRows { } } -/// Pipe syntax, first introduced in Google BigQuery. -/// Example: -/// -/// ```sql -/// FROM Produce -/// |> WHERE sales > 0 -/// |> AGGREGATE SUM(sales) AS total_sales, COUNT(*) AS num_sales -/// GROUP BY item; -/// ``` -/// -/// See -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum PipeOperator { - /// Limits the number of rows to return in a query, with an optional OFFSET clause to skip over rows. 
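`LimitClause` separates the standard `LIMIT ... [OFFSET ...]` form (optionally with ClickHouse's `LIMIT ... BY ...`) from MySQL's comma form, where the first number is the offset and the second the limit. A small sketch with `MySqlDialect`; the query itself is illustrative:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // MySQL comma form: skip 5 rows, return at most 10.
    let sql = "SELECT id FROM t LIMIT 5, 10";
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).expect("parse failed");
    // In releases with LimitClause, this is LimitClause::OffsetCommaLimit { .. }.
    println!("{:?}", stmts[0]);
}
```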
- /// - /// Syntax: `|> LIMIT [OFFSET ]` - /// - /// See more at - Limit { expr: Expr, offset: Option }, - /// Filters the results of the input table. - /// - /// Syntax: `|> WHERE ` - /// - /// See more at - Where { expr: Expr }, - /// `ORDER BY [ASC|DESC], ...` - OrderBy { exprs: Vec }, - /// Produces a new table with the listed columns, similar to the outermost SELECT clause in a table subquery in standard syntax. - /// - /// Syntax `|> SELECT [[AS] alias], ...` - /// - /// See more at - Select { exprs: Vec }, - /// Propagates the existing table and adds computed columns, similar to SELECT *, new_column in standard syntax. - /// - /// Syntax: `|> EXTEND [[AS] alias], ...` - /// - /// See more at - Extend { exprs: Vec }, - /// Replaces the value of a column in the current table, similar to SELECT * REPLACE (expression AS column) in standard syntax. - /// - /// Syntax: `|> SET = , ...` - /// - /// See more at - Set { assignments: Vec }, - /// Removes listed columns from the current table, similar to SELECT * EXCEPT (column) in standard syntax. - /// - /// Syntax: `|> DROP , ...` - /// - /// See more at - Drop { columns: Vec }, - /// Introduces a table alias for the input table, similar to applying the AS alias clause on a table subquery in standard syntax. - /// - /// Syntax: `|> AS ` - /// - /// See more at - As { alias: Ident }, - /// Performs aggregation on data across grouped rows or an entire table. - /// - /// Syntax: `|> AGGREGATE [[AS] alias], ...` - /// - /// Syntax: - /// ```norust - /// |> AGGREGATE [ [[AS] alias], ...] - /// GROUP BY [AS alias], ... - /// ``` - /// - /// See more at - Aggregate { - full_table_exprs: Vec, - group_by_expr: Vec, - }, - /// Selects a random sample of rows from the input table. - /// Syntax: `|> TABLESAMPLE SYSTEM (10 PERCENT) - /// See more at - TableSample { sample: Box }, - /// Renames columns in the input table. - /// - /// Syntax: `|> RENAME old_name AS new_name, ...` - /// - /// See more at - Rename { mappings: Vec }, - /// Combines the input table with one or more tables using UNION. - /// - /// Syntax: `|> UNION [ALL|DISTINCT] (), (), ...` - /// - /// See more at - Union { - set_quantifier: SetQuantifier, - queries: Vec, - }, - /// Returns only the rows that are present in both the input table and the specified tables. - /// - /// Syntax: `|> INTERSECT [DISTINCT] (), (), ...` - /// - /// See more at - Intersect { - set_quantifier: SetQuantifier, - queries: Vec, - }, - /// Returns only the rows that are present in the input table but not in the specified tables. - /// - /// Syntax: `|> EXCEPT DISTINCT (), (), ...` - /// - /// See more at - Except { - set_quantifier: SetQuantifier, - queries: Vec, - }, - /// Calls a table function or procedure that returns a table. - /// - /// Syntax: `|> CALL function_name(args) [AS alias]` - /// - /// See more at - Call { - function: Function, - alias: Option, - }, - /// Pivots data from rows to columns. - /// - /// Syntax: `|> PIVOT(aggregate_function(column) FOR pivot_column IN (value1, value2, ...)) [AS alias]` - /// - /// See more at - Pivot { - aggregate_functions: Vec, - value_column: Vec, - value_source: PivotValueSource, - alias: Option, - }, - /// The `UNPIVOT` pipe operator transforms columns into rows. - /// - /// Syntax: - /// ```sql - /// |> UNPIVOT(value_column FOR name_column IN (column1, column2, ...)) [alias] - /// ``` - /// - /// See more at - Unpivot { - value_column: Ident, - name_column: Ident, - unpivot_columns: Vec, - alias: Option, - }, - /// Joins the input table with another table. 
- /// - /// Syntax: `|> [JOIN_TYPE] JOIN <table> [alias] ON <condition>` or `|> [JOIN_TYPE] JOIN <table>
[alias] USING ()` - /// - /// See more at - Join(Join), -} - -impl fmt::Display for PipeOperator { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - PipeOperator::Select { exprs } => { - write!(f, "SELECT {}", display_comma_separated(exprs.as_slice())) - } - PipeOperator::Extend { exprs } => { - write!(f, "EXTEND {}", display_comma_separated(exprs.as_slice())) - } - PipeOperator::Set { assignments } => { - write!(f, "SET {}", display_comma_separated(assignments.as_slice())) - } - PipeOperator::Drop { columns } => { - write!(f, "DROP {}", display_comma_separated(columns.as_slice())) - } - PipeOperator::As { alias } => { - write!(f, "AS {alias}") - } - PipeOperator::Limit { expr, offset } => { - write!(f, "LIMIT {expr}")?; - if let Some(offset) = offset { - write!(f, " OFFSET {offset}")?; - } - Ok(()) - } - PipeOperator::Aggregate { - full_table_exprs, - group_by_expr, - } => { - write!(f, "AGGREGATE")?; - if !full_table_exprs.is_empty() { - write!( - f, - " {}", - display_comma_separated(full_table_exprs.as_slice()) - )?; - } - if !group_by_expr.is_empty() { - write!(f, " GROUP BY {}", display_comma_separated(group_by_expr))?; - } - Ok(()) - } - - PipeOperator::Where { expr } => { - write!(f, "WHERE {expr}") - } - PipeOperator::OrderBy { exprs } => { - write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice())) - } - - PipeOperator::TableSample { sample } => { - write!(f, "{sample}") - } - PipeOperator::Rename { mappings } => { - write!(f, "RENAME {}", display_comma_separated(mappings)) - } - PipeOperator::Union { - set_quantifier, - queries, - } => Self::fmt_set_operation(f, "UNION", set_quantifier, queries), - PipeOperator::Intersect { - set_quantifier, - queries, - } => Self::fmt_set_operation(f, "INTERSECT", set_quantifier, queries), - PipeOperator::Except { - set_quantifier, - queries, - } => Self::fmt_set_operation(f, "EXCEPT", set_quantifier, queries), - PipeOperator::Call { function, alias } => { - write!(f, "CALL {function}")?; - Self::fmt_optional_alias(f, alias) - } - PipeOperator::Pivot { - aggregate_functions, - value_column, - value_source, - alias, - } => { - write!( - f, - "PIVOT({} FOR {} IN ({}))", - display_comma_separated(aggregate_functions), - Expr::CompoundIdentifier(value_column.to_vec()), - value_source - )?; - Self::fmt_optional_alias(f, alias) - } - PipeOperator::Unpivot { - value_column, - name_column, - unpivot_columns, - alias, - } => { - write!( - f, - "UNPIVOT({} FOR {} IN ({}))", - value_column, - name_column, - display_comma_separated(unpivot_columns) - )?; - Self::fmt_optional_alias(f, alias) - } - PipeOperator::Join(join) => write!(f, "{join}"), - } - } -} - -impl PipeOperator { - /// Helper function to format optional alias for pipe operators - fn fmt_optional_alias(f: &mut fmt::Formatter<'_>, alias: &Option) -> fmt::Result { - if let Some(alias) = alias { - write!(f, " AS {alias}")?; - } - Ok(()) - } - - /// Helper function to format set operations (UNION, INTERSECT, EXCEPT) with queries - fn fmt_set_operation( - f: &mut fmt::Formatter<'_>, - operation: &str, - set_quantifier: &SetQuantifier, - queries: &[Query], - ) -> fmt::Result { - write!(f, "{operation}")?; - match set_quantifier { - SetQuantifier::None => {} - _ => { - write!(f, " {set_quantifier}")?; - } - } - write!(f, " ")?; - let parenthesized_queries: Vec = - queries.iter().map(|query| format!("({query})")).collect(); - write!(f, "{}", display_comma_separated(&parenthesized_queries)) - } -} - #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] 
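The `PipeOperator` enum above models BigQuery's pipe syntax (`|>`). A hedged sketch of parsing a short pipe query, assuming `BigQueryDialect` and a release where `Query` carries pipe operators; the table and column names mirror the doc comment's example:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // FROM-first query with two pipe operators: a filter and an aggregation.
    let sql = "FROM Produce \
               |> WHERE sales > 0 \
               |> AGGREGATE SUM(sales) AS total_sales GROUP BY item";
    match Parser::parse_sql(&BigQueryDialect {}, sql) {
        Ok(stmts) => println!("{}", stmts[0]),
        Err(e) => eprintln!("pipe syntax not accepted by this version: {e}"),
    }
}
```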
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -3058,14 +2047,13 @@ pub struct Values { impl fmt::Display for Values { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("VALUES")?; + write!(f, "VALUES ")?; let prefix = if self.explicit_row { "ROW" } else { "" }; let mut delim = ""; for row in &self.rows { - f.write_str(delim)?; - delim = ","; - SpaceOrNewline.fmt(f)?; - Indent(format_args!("{prefix}({})", display_comma_separated(row))).fmt(f)?; + write!(f, "{delim}")?; + delim = ", "; + write!(f, "{prefix}({})", display_comma_separated(row))?; } Ok(()) } @@ -3095,18 +2083,13 @@ impl fmt::Display for SelectInto { /// e.g. GROUP BY year WITH ROLLUP WITH TOTALS /// /// [ClickHouse]: -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum GroupByWithModifier { Rollup, Cube, Totals, - /// Hive supports GROUP BY GROUPING SETS syntax. - /// e.g. GROUP BY year , month GROUPING SETS((year,month),(year),(month)) - /// - /// [Hive]: - GroupingSets(Expr), } impl fmt::Display for GroupByWithModifier { @@ -3115,9 +2098,6 @@ impl fmt::Display for GroupByWithModifier { GroupByWithModifier::Rollup => write!(f, "WITH ROLLUP"), GroupByWithModifier::Cube => write!(f, "WITH CUBE"), GroupByWithModifier::Totals => write!(f, "WITH TOTALS"), - GroupByWithModifier::GroupingSets(expr) => { - write!(f, "{expr}") - } } } } @@ -3152,9 +2132,8 @@ impl fmt::Display for GroupByExpr { Ok(()) } GroupByExpr::Expressions(col_names, modifiers) => { - f.write_str("GROUP BY")?; - SpaceOrNewline.fmt(f)?; - Indent(display_comma_separated(col_names)).fmt(f)?; + let col_names = display_comma_separated(col_names); + write!(f, "GROUP BY {col_names}")?; if !modifiers.is_empty() { write!(f, " {}", display_separated(modifiers, " "))?; } @@ -3178,35 +2157,12 @@ pub enum FormatClause { impl fmt::Display for FormatClause { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - FormatClause::Identifier(ident) => write!(f, "FORMAT {ident}"), + FormatClause::Identifier(ident) => write!(f, "FORMAT {}", ident), FormatClause::Null => write!(f, "FORMAT NULL"), } } } -/// FORMAT identifier in input context, specific to ClickHouse. 
-/// -/// [ClickHouse]: -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct InputFormatClause { - pub ident: Ident, - pub values: Vec, -} - -impl fmt::Display for InputFormatClause { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "FORMAT {}", self.ident)?; - - if !self.values.is_empty() { - write!(f, " {}", display_comma_separated(self.values.as_slice()))?; - } - - Ok(()) - } -} - /// FOR XML or FOR JSON clause, specific to MSSQL /// (formats the output of a query as XML or JSON) #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -3240,9 +2196,9 @@ impl fmt::Display for ForClause { without_array_wrapper, } => { write!(f, "FOR JSON ")?; - write!(f, "{for_json}")?; + write!(f, "{}", for_json)?; if let Some(root) = root { - write!(f, ", ROOT('{root}')")?; + write!(f, ", ROOT('{}')", root)?; } if *include_null_values { write!(f, ", INCLUDE_NULL_VALUES")?; @@ -3260,7 +2216,7 @@ impl fmt::Display for ForClause { r#type, } => { write!(f, "FOR XML ")?; - write!(f, "{for_xml}")?; + write!(f, "{}", for_xml)?; if *binary_base64 { write!(f, ", BINARY BASE64")?; } @@ -3268,7 +2224,7 @@ impl fmt::Display for ForClause { write!(f, ", TYPE")?; } if let Some(root) = root { - write!(f, ", ROOT('{root}')")?; + write!(f, ", ROOT('{}')", root)?; } if *elements { write!(f, ", ELEMENTS")?; @@ -3295,7 +2251,7 @@ impl fmt::Display for ForXml { ForXml::Raw(root) => { write!(f, "RAW")?; if let Some(root) = root { - write!(f, "('{root}')")?; + write!(f, "('{}')", root)?; } Ok(()) } @@ -3304,7 +2260,7 @@ impl fmt::Display for ForXml { ForXml::Path(root) => { write!(f, "PATH")?; if let Some(root) = root { - write!(f, "('{root}')")?; + write!(f, "('{}')", root)?; } Ok(()) } @@ -3367,7 +2323,7 @@ impl fmt::Display for JsonTableColumn { JsonTableColumn::Named(json_table_named_column) => { write!(f, "{json_table_named_column}") } - JsonTableColumn::ForOrdinality(ident) => write!(f, "{ident} FOR ORDINALITY"), + JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident), JsonTableColumn::Nested(json_table_nested_column) => { write!(f, "{json_table_nested_column}") } @@ -3433,10 +2389,10 @@ impl fmt::Display for JsonTableNamedColumn { self.path )?; if let Some(on_empty) = &self.on_empty { - write!(f, " {on_empty} ON EMPTY")?; + write!(f, " {} ON EMPTY", on_empty)?; } if let Some(on_error) = &self.on_error { - write!(f, " {on_error} ON ERROR")?; + write!(f, " {} ON ERROR", on_error)?; } Ok(()) } @@ -3458,61 +2414,23 @@ impl fmt::Display for JsonTableColumnErrorHandling { match self { JsonTableColumnErrorHandling::Null => write!(f, "NULL"), JsonTableColumnErrorHandling::Default(json_string) => { - write!(f, "DEFAULT {json_string}") + write!(f, "DEFAULT {}", json_string) } JsonTableColumnErrorHandling::Error => write!(f, "ERROR"), } } } -/// A single column definition in MSSQL's `OPENJSON WITH` clause. -/// -/// ```sql -/// colName type [ column_path ] [ AS JSON ] -/// ``` -/// -/// Reference: -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct OpenJsonTableColumn { - /// The name of the column to be extracted. - pub name: Ident, - /// The type of the column to be extracted. - pub r#type: DataType, - /// The path to the column to be extracted. Must be a literal string. 
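`ForClause` models MSSQL's `FOR XML` / `FOR JSON` output options rendered by the Display impls above. A hedged sketch with `MsSqlDialect`; the query and root name are invented and only a subset of options is shown:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // FOR JSON PATH with a ROOT name, as written back by the Display impl above.
    let sql = "SELECT id, name FROM people FOR JSON PATH, ROOT('people')";
    match Parser::parse_sql(&MsSqlDialect {}, sql) {
        Ok(stmts) => println!("{}", stmts[0]),
        Err(e) => eprintln!("parse error: {e}"),
    }
}
```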
- pub path: Option, - /// The `AS JSON` option. - pub as_json: bool, -} - -impl fmt::Display for OpenJsonTableColumn { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{} {}", self.name, self.r#type)?; - if let Some(path) = &self.path { - write!(f, " '{}'", value::escape_single_quote_string(path))?; - } - if self.as_json { - write!(f, " AS JSON")?; - } - Ok(()) - } -} - /// BigQuery supports ValueTables which have 2 modes: -/// `SELECT [ALL | DISTINCT] AS STRUCT` -/// `SELECT [ALL | DISTINCT] AS VALUE` -/// +/// `SELECT AS STRUCT` +/// `SELECT AS VALUE` /// -/// #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum ValueTableMode { AsStruct, AsValue, - DistinctAsStruct, - DistinctAsValue, } impl fmt::Display for ValueTableMode { @@ -3520,151 +2438,6 @@ impl fmt::Display for ValueTableMode { match self { ValueTableMode::AsStruct => write!(f, "AS STRUCT"), ValueTableMode::AsValue => write!(f, "AS VALUE"), - ValueTableMode::DistinctAsStruct => write!(f, "DISTINCT AS STRUCT"), - ValueTableMode::DistinctAsValue => write!(f, "DISTINCT AS VALUE"), } } } - -/// The `FROM` clause of an `UPDATE TABLE` statement -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum UpdateTableFromKind { - /// Update Statement where the 'FROM' clause is before the 'SET' keyword (Supported by Snowflake) - /// For Example: `UPDATE FROM t1 SET t1.name='aaa'` - BeforeSet(Vec), - /// Update Statement where the 'FROM' clause is after the 'SET' keyword (Which is the standard way) - /// For Example: `UPDATE SET t1.name='aaa' FROM t1` - AfterSet(Vec), -} - -/// Defines the options for an XmlTable column: Named or ForOrdinality -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub enum XmlTableColumnOption { - /// A named column with a type, optional path, and default value. - NamedInfo { - /// The type of the column to be extracted. - r#type: DataType, - /// The path to the column to be extracted. If None, defaults to the column name. - path: Option, - /// Default value if path does not match - default: Option, - /// Whether the column is nullable (NULL=true, NOT NULL=false) - nullable: bool, - }, - /// The FOR ORDINALITY marker - ForOrdinality, -} - -/// A single column definition in XMLTABLE -/// -/// ```sql -/// COLUMNS -/// id int PATH '@id', -/// ordinality FOR ORDINALITY, -/// "COUNTRY_NAME" text, -/// country_id text PATH 'COUNTRY_ID', -/// size_sq_km float PATH 'SIZE[@unit = "sq_km"]', -/// size_other text PATH 'concat(SIZE[@unit!="sq_km"], " ", SIZE[@unit!="sq_km"]/@unit)', -/// premier_name text PATH 'PREMIER_NAME' DEFAULT 'not specified' -/// ``` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct XmlTableColumn { - /// The name of the column. 
- pub name: Ident, - /// Column options: type/path/default or FOR ORDINALITY - pub option: XmlTableColumnOption, -} - -impl fmt::Display for XmlTableColumn { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name)?; - match &self.option { - XmlTableColumnOption::NamedInfo { - r#type, - path, - default, - nullable, - } => { - write!(f, " {type}")?; - if let Some(p) = path { - write!(f, " PATH {p}")?; - } - if let Some(d) = default { - write!(f, " DEFAULT {d}")?; - } - if !*nullable { - write!(f, " NOT NULL")?; - } - Ok(()) - } - XmlTableColumnOption::ForOrdinality => { - write!(f, " FOR ORDINALITY") - } - } - } -} - -/// Argument passed in the XMLTABLE PASSING clause -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct XmlPassingArgument { - pub expr: Expr, - pub alias: Option, - pub by_value: bool, // True if BY VALUE is specified -} - -impl fmt::Display for XmlPassingArgument { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.by_value { - write!(f, "BY VALUE ")?; - } - write!(f, "{}", self.expr)?; - if let Some(alias) = &self.alias { - write!(f, " AS {alias}")?; - } - Ok(()) - } -} - -/// The PASSING clause for XMLTABLE -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct XmlPassingClause { - pub arguments: Vec, -} - -impl fmt::Display for XmlPassingClause { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if !self.arguments.is_empty() { - write!(f, " PASSING {}", display_comma_separated(&self.arguments))?; - } - Ok(()) - } -} - -/// Represents a single XML namespace definition in the XMLNAMESPACES clause. -/// -/// `namespace_uri AS namespace_name` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct XmlNamespaceDefinition { - /// The namespace URI (a text expression). - pub uri: Expr, - /// The alias for the namespace (a simple identifier). - pub name: Ident, -} - -impl fmt::Display for XmlNamespaceDefinition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{} AS {}", self.uri, self.name) - } -} diff --git a/src/ast/spans.rs b/src/ast/spans.rs deleted file mode 100644 index 3e82905e..00000000 --- a/src/ast/spans.rs +++ /dev/null @@ -1,2511 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
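The `XmlTableColumn`, `XmlPassingClause`, and `XmlNamespaceDefinition` types above back the `XMLTABLE` factor. A hedged sketch with `PostgreSqlDialect`, abbreviating the doc comment's example; it assumes a release that includes `TableFactor::XmlTable`:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Shortened form of the XMLTABLE example from the doc comment above.
    let sql = "SELECT xmltable.* FROM xmldata, \
               XMLTABLE('//ROWS/ROW' PASSING data \
               COLUMNS id int PATH '@id', country_id text PATH 'COUNTRY_ID')";
    match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
        Ok(stmts) => println!("{}", stmts[0]),
        Err(e) => eprintln!("parse error: {e}"),
    }
}
```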
- -use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions}; -use core::iter; - -use crate::tokenizer::Span; - -use super::{ - dcl::SecondaryRoles, value::ValueWithSpan, AccessExpr, AlterColumnOperation, - AlterIndexOperation, AlterTableOperation, Array, Assignment, AssignmentTarget, AttachedToken, - BeginEndStatements, CaseStatement, CloseCursor, ClusteredIndex, ColumnDef, ColumnOption, - ColumnOptionDef, ConditionalStatementBlock, ConditionalStatements, ConflictTarget, ConnectBy, - ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte, - Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable, - Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList, - FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert, - Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, - LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, - NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, - OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, - ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction, - RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem, - SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef, - TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins, - UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement, - WildcardAdditionalOptions, With, WithFill, -}; - -/// Given an iterator of spans, return the [Span::union] of all spans. -fn union_spans>(iter: I) -> Span { - Span::union_iter(iter) -} - -/// Trait for AST nodes that have a source location information. -/// -/// # Notes: -/// -/// Source [`Span`] are not yet complete. They may be missing: -/// -/// 1. keywords or other tokens -/// 2. span information entirely, in which case they return [`Span::empty()`]. -/// -/// Note Some impl blocks (rendered below) are annotated with which nodes are -/// missing spans. See [this ticket] for additional information and status. -/// -/// [this ticket]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 -/// -/// # Example -/// ``` -/// # use sqlparser::parser::{Parser, ParserError}; -/// # use sqlparser::ast::Spanned; -/// # use sqlparser::dialect::GenericDialect; -/// # use sqlparser::tokenizer::Location; -/// # fn main() -> Result<(), ParserError> { -/// let dialect = GenericDialect {}; -/// let sql = r#"SELECT * -/// FROM table_1"#; -/// let statements = Parser::new(&dialect) -/// .try_with_sql(sql)? -/// .parse_statements()?; -/// // Get the span of the first statement (SELECT) -/// let span = statements[0].span(); -/// // statement starts at line 1, column 1 (1 based, not 0 based) -/// assert_eq!(span.start, Location::new(1, 1)); -/// // statement ends on line 2, column 15 -/// assert_eq!(span.end, Location::new(2, 15)); -/// # Ok(()) -/// # } -/// ``` -/// -pub trait Spanned { - /// Return the [`Span`] (the minimum and maximum [`Location`]) for this AST - /// node, by recursively combining the spans of its children. 
- /// - /// [`Location`]: crate::tokenizer::Location - fn span(&self) -> Span; -} - -impl Spanned for Query { - fn span(&self) -> Span { - let Query { - with, - body, - order_by, - limit_clause, - fetch, - locks: _, // todo - for_clause: _, // todo, mssql specific - settings: _, // todo, clickhouse specific - format_clause: _, // todo, clickhouse specific - pipe_operators: _, // todo bigquery specific - } = self; - - union_spans( - with.iter() - .map(|i| i.span()) - .chain(core::iter::once(body.span())) - .chain(order_by.as_ref().map(|i| i.span())) - .chain(limit_clause.as_ref().map(|i| i.span())) - .chain(fetch.as_ref().map(|i| i.span())), - ) - } -} - -impl Spanned for LimitClause { - fn span(&self) -> Span { - match self { - LimitClause::LimitOffset { - limit, - offset, - limit_by, - } => union_spans( - limit - .iter() - .map(|i| i.span()) - .chain(offset.as_ref().map(|i| i.span())) - .chain(limit_by.iter().map(|i| i.span())), - ), - LimitClause::OffsetCommaLimit { offset, limit } => offset.span().union(&limit.span()), - } - } -} - -impl Spanned for Offset { - fn span(&self) -> Span { - let Offset { - value, - rows: _, // enum - } = self; - - value.span() - } -} - -impl Spanned for Fetch { - fn span(&self) -> Span { - let Fetch { - with_ties: _, // bool - percent: _, // bool - quantity, - } = self; - - quantity.as_ref().map_or(Span::empty(), |i| i.span()) - } -} - -impl Spanned for With { - fn span(&self) -> Span { - let With { - with_token, - recursive: _, // bool - cte_tables, - } = self; - - union_spans( - core::iter::once(with_token.0.span).chain(cte_tables.iter().map(|item| item.span())), - ) - } -} - -impl Spanned for Cte { - fn span(&self) -> Span { - let Cte { - alias, - query, - from, - materialized: _, // enum - closing_paren_token, - } = self; - - union_spans( - core::iter::once(alias.span()) - .chain(core::iter::once(query.span())) - .chain(from.iter().map(|item| item.span)) - .chain(core::iter::once(closing_paren_token.0.span)), - ) - } -} - -/// # partial span -/// -/// [SetExpr::Table] is not implemented. 
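`Spanned` is implemented for many nested nodes, not just whole statements, so spans can be read off individual clauses. A hedged sketch that takes the span of the first projection item, assuming a release that still ships `src/ast/spans.rs` (the file removed by this diff):

```rust
use sqlparser::ast::{SetExpr, Spanned, Statement};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT a, b FROM table_1";
    let stmts = Parser::parse_sql(&GenericDialect {}, sql).expect("parse failed");

    if let Statement::Query(query) = &stmts[0] {
        if let SetExpr::Select(select) = query.body.as_ref() {
            // Span of the projection item `a`: start and end are 1-based
            // line/column Locations into the original SQL text.
            println!("{:?}", select.projection[0].span());
        }
    }
}
```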
-impl Spanned for SetExpr { - fn span(&self) -> Span { - match self { - SetExpr::Select(select) => select.span(), - SetExpr::Query(query) => query.span(), - SetExpr::SetOperation { - op: _, - set_quantifier: _, - left, - right, - } => left.span().union(&right.span()), - SetExpr::Values(values) => values.span(), - SetExpr::Insert(statement) => statement.span(), - SetExpr::Table(_) => Span::empty(), - SetExpr::Update(statement) => statement.span(), - SetExpr::Delete(statement) => statement.span(), - } - } -} - -impl Spanned for Values { - fn span(&self) -> Span { - let Values { - explicit_row: _, // bool, - rows, - } = self; - - union_spans( - rows.iter() - .map(|row| union_spans(row.iter().map(|expr| expr.span()))), - ) - } -} - -/// # partial span -/// -/// Missing spans: -/// - [Statement::CopyIntoSnowflake] -/// - [Statement::CreateSecret] -/// - [Statement::CreateRole] -/// - [Statement::AlterType] -/// - [Statement::AlterRole] -/// - [Statement::AttachDatabase] -/// - [Statement::AttachDuckDBDatabase] -/// - [Statement::DetachDuckDBDatabase] -/// - [Statement::Drop] -/// - [Statement::DropFunction] -/// - [Statement::DropProcedure] -/// - [Statement::DropSecret] -/// - [Statement::Declare] -/// - [Statement::CreateExtension] -/// - [Statement::Fetch] -/// - [Statement::Flush] -/// - [Statement::Discard] -/// - [Statement::Set] -/// - [Statement::ShowFunctions] -/// - [Statement::ShowVariable] -/// - [Statement::ShowStatus] -/// - [Statement::ShowVariables] -/// - [Statement::ShowCreate] -/// - [Statement::ShowColumns] -/// - [Statement::ShowTables] -/// - [Statement::ShowCollation] -/// - [Statement::StartTransaction] -/// - [Statement::Comment] -/// - [Statement::Commit] -/// - [Statement::Rollback] -/// - [Statement::CreateSchema] -/// - [Statement::CreateDatabase] -/// - [Statement::CreateFunction] -/// - [Statement::CreateTrigger] -/// - [Statement::DropTrigger] -/// - [Statement::CreateProcedure] -/// - [Statement::CreateMacro] -/// - [Statement::CreateStage] -/// - [Statement::Assert] -/// - [Statement::Grant] -/// - [Statement::Revoke] -/// - [Statement::Deallocate] -/// - [Statement::Execute] -/// - [Statement::Prepare] -/// - [Statement::Kill] -/// - [Statement::ExplainTable] -/// - [Statement::Explain] -/// - [Statement::Savepoint] -/// - [Statement::ReleaseSavepoint] -/// - [Statement::Merge] -/// - [Statement::Cache] -/// - [Statement::UNCache] -/// - [Statement::CreateSequence] -/// - [Statement::CreateType] -/// - [Statement::Pragma] -/// - [Statement::LockTables] -/// - [Statement::UnlockTables] -/// - [Statement::Unload] -/// - [Statement::OptimizeTable] -impl Spanned for Statement { - fn span(&self) -> Span { - match self { - Statement::Analyze { - table_name, - partitions, - for_columns: _, - columns, - cache_metadata: _, - noscan: _, - compute_statistics: _, - has_table_keyword: _, - } => union_spans( - core::iter::once(table_name.span()) - .chain(partitions.iter().flat_map(|i| i.iter().map(|k| k.span()))) - .chain(columns.iter().map(|i| i.span)), - ), - Statement::Truncate { - table_names, - partitions, - table: _, - identity: _, - cascade: _, - on_cluster: _, - } => union_spans( - table_names - .iter() - .map(|i| i.name.span()) - .chain(partitions.iter().flat_map(|i| i.iter().map(|k| k.span()))), - ), - Statement::Msck { - table_name, - repair: _, - partition_action: _, - } => table_name.span(), - Statement::Query(query) => query.span(), - Statement::Insert(insert) => insert.span(), - Statement::Install { extension_name } => extension_name.span, - Statement::Load { 
extension_name } => extension_name.span, - Statement::Directory { - overwrite: _, - local: _, - path: _, - file_format: _, - source, - } => source.span(), - Statement::Case(stmt) => stmt.span(), - Statement::If(stmt) => stmt.span(), - Statement::While(stmt) => stmt.span(), - Statement::Raise(stmt) => stmt.span(), - Statement::Call(function) => function.span(), - Statement::Copy { - source, - to: _, - target: _, - options: _, - legacy_options: _, - values: _, - } => source.span(), - Statement::CopyIntoSnowflake { - into: _, - into_columns: _, - from_obj: _, - from_obj_alias: _, - stage_params: _, - from_transformations: _, - files: _, - pattern: _, - file_format: _, - copy_options: _, - validation_mode: _, - kind: _, - from_query: _, - partition: _, - } => Span::empty(), - Statement::Open(open) => open.span(), - Statement::Close { cursor } => match cursor { - CloseCursor::All => Span::empty(), - CloseCursor::Specific { name } => name.span, - }, - Statement::Update { - table, - assignments, - from, - selection, - returning, - or: _, - } => union_spans( - core::iter::once(table.span()) - .chain(assignments.iter().map(|i| i.span())) - .chain(from.iter().map(|i| i.span())) - .chain(selection.iter().map(|i| i.span())) - .chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))), - ), - Statement::Delete(delete) => delete.span(), - Statement::CreateView { - or_alter: _, - or_replace: _, - materialized: _, - name, - columns, - query, - options, - cluster_by, - comment: _, - with_no_schema_binding: _, - if_not_exists: _, - temporary: _, - to, - params: _, - } => union_spans( - core::iter::once(name.span()) - .chain(columns.iter().map(|i| i.span())) - .chain(core::iter::once(query.span())) - .chain(core::iter::once(options.span())) - .chain(cluster_by.iter().map(|i| i.span)) - .chain(to.iter().map(|i| i.span())), - ), - Statement::CreateTable(create_table) => create_table.span(), - Statement::CreateVirtualTable { - name, - if_not_exists: _, - module_name, - module_args, - } => union_spans( - core::iter::once(name.span()) - .chain(core::iter::once(module_name.span)) - .chain(module_args.iter().map(|i| i.span)), - ), - Statement::CreateIndex(create_index) => create_index.span(), - Statement::CreateRole { .. } => Span::empty(), - Statement::CreateSecret { .. } => Span::empty(), - Statement::CreateServer { .. } => Span::empty(), - Statement::CreateConnector { .. } => Span::empty(), - Statement::AlterTable { - name, - if_exists: _, - only: _, - operations, - location: _, - on_cluster, - iceberg: _, - } => union_spans( - core::iter::once(name.span()) - .chain(operations.iter().map(|i| i.span())) - .chain(on_cluster.iter().map(|i| i.span)), - ), - Statement::AlterIndex { name, operation } => name.span().union(&operation.span()), - Statement::AlterView { - name, - columns, - query, - with_options, - } => union_spans( - core::iter::once(name.span()) - .chain(columns.iter().map(|i| i.span)) - .chain(core::iter::once(query.span())) - .chain(with_options.iter().map(|i| i.span())), - ), - // These statements need to be implemented - Statement::AlterType { .. } => Span::empty(), - Statement::AlterRole { .. } => Span::empty(), - Statement::AlterSession { .. } => Span::empty(), - Statement::AttachDatabase { .. } => Span::empty(), - Statement::AttachDuckDBDatabase { .. } => Span::empty(), - Statement::DetachDuckDBDatabase { .. } => Span::empty(), - Statement::Drop { .. } => Span::empty(), - Statement::DropFunction { .. } => Span::empty(), - Statement::DropDomain { .. 
} => Span::empty(), - Statement::DropProcedure { .. } => Span::empty(), - Statement::DropSecret { .. } => Span::empty(), - Statement::Declare { .. } => Span::empty(), - Statement::CreateExtension { .. } => Span::empty(), - Statement::DropExtension { .. } => Span::empty(), - Statement::Fetch { .. } => Span::empty(), - Statement::Flush { .. } => Span::empty(), - Statement::Discard { .. } => Span::empty(), - Statement::Set(_) => Span::empty(), - Statement::ShowFunctions { .. } => Span::empty(), - Statement::ShowVariable { .. } => Span::empty(), - Statement::ShowStatus { .. } => Span::empty(), - Statement::ShowVariables { .. } => Span::empty(), - Statement::ShowCreate { .. } => Span::empty(), - Statement::ShowColumns { .. } => Span::empty(), - Statement::ShowTables { .. } => Span::empty(), - Statement::ShowCollation { .. } => Span::empty(), - Statement::Use(u) => u.span(), - Statement::StartTransaction { .. } => Span::empty(), - Statement::Comment { .. } => Span::empty(), - Statement::Commit { .. } => Span::empty(), - Statement::Rollback { .. } => Span::empty(), - Statement::CreateSchema { .. } => Span::empty(), - Statement::CreateDatabase { .. } => Span::empty(), - Statement::CreateFunction { .. } => Span::empty(), - Statement::CreateDomain { .. } => Span::empty(), - Statement::CreateTrigger { .. } => Span::empty(), - Statement::DropTrigger { .. } => Span::empty(), - Statement::CreateProcedure { .. } => Span::empty(), - Statement::CreateMacro { .. } => Span::empty(), - Statement::CreateStage { .. } => Span::empty(), - Statement::Assert { .. } => Span::empty(), - Statement::Grant { .. } => Span::empty(), - Statement::Deny { .. } => Span::empty(), - Statement::Revoke { .. } => Span::empty(), - Statement::Deallocate { .. } => Span::empty(), - Statement::Execute { .. } => Span::empty(), - Statement::Prepare { .. } => Span::empty(), - Statement::Kill { .. } => Span::empty(), - Statement::ExplainTable { .. } => Span::empty(), - Statement::Explain { .. } => Span::empty(), - Statement::Savepoint { .. } => Span::empty(), - Statement::ReleaseSavepoint { .. } => Span::empty(), - Statement::Merge { .. } => Span::empty(), - Statement::Cache { .. } => Span::empty(), - Statement::UNCache { .. } => Span::empty(), - Statement::CreateSequence { .. } => Span::empty(), - Statement::CreateType { .. } => Span::empty(), - Statement::Pragma { .. } => Span::empty(), - Statement::LockTables { .. } => Span::empty(), - Statement::UnlockTables => Span::empty(), - Statement::Unload { .. } => Span::empty(), - Statement::OptimizeTable { .. } => Span::empty(), - Statement::CreatePolicy { .. } => Span::empty(), - Statement::AlterPolicy { .. } => Span::empty(), - Statement::AlterConnector { .. } => Span::empty(), - Statement::DropPolicy { .. } => Span::empty(), - Statement::DropConnector { .. } => Span::empty(), - Statement::ShowDatabases { .. } => Span::empty(), - Statement::ShowSchemas { .. } => Span::empty(), - Statement::ShowObjects { .. } => Span::empty(), - Statement::ShowViews { .. } => Span::empty(), - Statement::LISTEN { .. } => Span::empty(), - Statement::NOTIFY { .. } => Span::empty(), - Statement::LoadData { .. } => Span::empty(), - Statement::UNLISTEN { .. } => Span::empty(), - Statement::RenameTable { .. } => Span::empty(), - Statement::RaisError { .. } => Span::empty(), - Statement::Print { .. } => Span::empty(), - Statement::Return { .. } => Span::empty(), - Statement::List(..) | Statement::Remove(..) 
=> Span::empty(), - } - } -} - -impl Spanned for Use { - fn span(&self) -> Span { - match self { - Use::Catalog(object_name) => object_name.span(), - Use::Schema(object_name) => object_name.span(), - Use::Database(object_name) => object_name.span(), - Use::Warehouse(object_name) => object_name.span(), - Use::Role(object_name) => object_name.span(), - Use::SecondaryRoles(secondary_roles) => { - if let SecondaryRoles::List(roles) = secondary_roles { - return union_spans(roles.iter().map(|i| i.span)); - } - Span::empty() - } - Use::Object(object_name) => object_name.span(), - Use::Default => Span::empty(), - } - } -} - -impl Spanned for CreateTable { - fn span(&self) -> Span { - let CreateTable { - or_replace: _, // bool - temporary: _, // bool - external: _, // bool - global: _, // bool - if_not_exists: _, // bool - transient: _, // bool - volatile: _, // bool - iceberg: _, // bool, Snowflake specific - name, - columns, - constraints, - hive_distribution: _, // hive specific - hive_formats: _, // hive specific - file_format: _, // enum - location: _, // string, no span - query, - without_rowid: _, // bool - like, - clone, - comment: _, // todo, no span - on_commit: _, - on_cluster: _, // todo, clickhouse specific - primary_key: _, // todo, clickhouse specific - order_by: _, // todo, clickhouse specific - partition_by: _, // todo, BigQuery specific - cluster_by: _, // todo, BigQuery specific - clustered_by: _, // todo, Hive specific - inherits: _, // todo, PostgreSQL specific - strict: _, // bool - copy_grants: _, // bool - enable_schema_evolution: _, // bool - change_tracking: _, // bool - data_retention_time_in_days: _, // u64, no span - max_data_extension_time_in_days: _, // u64, no span - default_ddl_collation: _, // string, no span - with_aggregation_policy: _, // todo, Snowflake specific - with_row_access_policy: _, // todo, Snowflake specific - with_tags: _, // todo, Snowflake specific - external_volume: _, // todo, Snowflake specific - base_location: _, // todo, Snowflake specific - catalog: _, // todo, Snowflake specific - catalog_sync: _, // todo, Snowflake specific - storage_serialization_policy: _, - table_options, - } = self; - - union_spans( - core::iter::once(name.span()) - .chain(core::iter::once(table_options.span())) - .chain(columns.iter().map(|i| i.span())) - .chain(constraints.iter().map(|i| i.span())) - .chain(query.iter().map(|i| i.span())) - .chain(like.iter().map(|i| i.span())) - .chain(clone.iter().map(|i| i.span())), - ) - } -} - -impl Spanned for ColumnDef { - fn span(&self) -> Span { - let ColumnDef { - name, - data_type: _, // enum - options, - } = self; - - union_spans(core::iter::once(name.span).chain(options.iter().map(|i| i.span()))) - } -} - -impl Spanned for ColumnOptionDef { - fn span(&self) -> Span { - let ColumnOptionDef { name, option } = self; - - option.span().union_opt(&name.as_ref().map(|i| i.span)) - } -} - -impl Spanned for TableConstraint { - fn span(&self) -> Span { - match self { - TableConstraint::Unique { - name, - index_name, - index_type_display: _, - index_type: _, - columns, - index_options: _, - characteristics, - nulls_distinct: _, - } => union_spans( - name.iter() - .map(|i| i.span) - .chain(index_name.iter().map(|i| i.span)) - .chain(columns.iter().map(|i| i.span())) - .chain(characteristics.iter().map(|i| i.span())), - ), - TableConstraint::PrimaryKey { - name, - index_name, - index_type: _, - columns, - index_options: _, - characteristics, - } => union_spans( - name.iter() - .map(|i| i.span) - .chain(index_name.iter().map(|i| 
i.span)) - .chain(columns.iter().map(|i| i.span())) - .chain(characteristics.iter().map(|i| i.span())), - ), - TableConstraint::ForeignKey { - name, - columns, - index_name, - foreign_table, - referred_columns, - on_delete, - on_update, - characteristics, - } => union_spans( - name.iter() - .map(|i| i.span) - .chain(index_name.iter().map(|i| i.span)) - .chain(columns.iter().map(|i| i.span)) - .chain(core::iter::once(foreign_table.span())) - .chain(referred_columns.iter().map(|i| i.span)) - .chain(on_delete.iter().map(|i| i.span())) - .chain(on_update.iter().map(|i| i.span())) - .chain(characteristics.iter().map(|i| i.span())), - ), - TableConstraint::Check { - name, - expr, - enforced: _, - } => expr.span().union_opt(&name.as_ref().map(|i| i.span)), - TableConstraint::Index { - display_as_key: _, - name, - index_type: _, - columns, - } => union_spans( - name.iter() - .map(|i| i.span) - .chain(columns.iter().map(|i| i.span())), - ), - TableConstraint::FulltextOrSpatial { - fulltext: _, - index_type_display: _, - opt_index_name, - columns, - } => union_spans( - opt_index_name - .iter() - .map(|i| i.span) - .chain(columns.iter().map(|i| i.span())), - ), - } - } -} - -impl Spanned for CreateIndex { - fn span(&self) -> Span { - let CreateIndex { - name, - table_name, - using: _, - columns, - unique: _, // bool - concurrently: _, // bool - if_not_exists: _, // bool - include, - nulls_distinct: _, // bool - with, - predicate, - } = self; - - union_spans( - name.iter() - .map(|i| i.span()) - .chain(core::iter::once(table_name.span())) - .chain(columns.iter().map(|i| i.column.span())) - .chain(include.iter().map(|i| i.span)) - .chain(with.iter().map(|i| i.span())) - .chain(predicate.iter().map(|i| i.span())), - ) - } -} - -impl Spanned for IndexColumn { - fn span(&self) -> Span { - self.column.span() - } -} - -impl Spanned for CaseStatement { - fn span(&self) -> Span { - let CaseStatement { - case_token: AttachedToken(start), - match_expr: _, - when_blocks: _, - else_block: _, - end_case_token: AttachedToken(end), - } = self; - - union_spans([start.span, end.span].into_iter()) - } -} - -impl Spanned for IfStatement { - fn span(&self) -> Span { - let IfStatement { - if_block, - elseif_blocks, - else_block, - end_token, - } = self; - - union_spans( - iter::once(if_block.span()) - .chain(elseif_blocks.iter().map(|b| b.span())) - .chain(else_block.as_ref().map(|b| b.span())) - .chain(end_token.as_ref().map(|AttachedToken(t)| t.span)), - ) - } -} - -impl Spanned for WhileStatement { - fn span(&self) -> Span { - let WhileStatement { while_block } = self; - - while_block.span() - } -} - -impl Spanned for ConditionalStatements { - fn span(&self) -> Span { - match self { - ConditionalStatements::Sequence { statements } => { - union_spans(statements.iter().map(|s| s.span())) - } - ConditionalStatements::BeginEnd(bes) => bes.span(), - } - } -} - -impl Spanned for ConditionalStatementBlock { - fn span(&self) -> Span { - let ConditionalStatementBlock { - start_token: AttachedToken(start_token), - condition, - then_token, - conditional_statements, - } = self; - - union_spans( - iter::once(start_token.span) - .chain(condition.as_ref().map(|c| c.span())) - .chain(then_token.as_ref().map(|AttachedToken(t)| t.span)) - .chain(iter::once(conditional_statements.span())), - ) - } -} - -impl Spanned for RaiseStatement { - fn span(&self) -> Span { - let RaiseStatement { value } = self; - - union_spans(value.iter().map(|value| value.span())) - } -} - -impl Spanned for RaiseStatementValue { - fn span(&self) -> Span { - 
match self { - RaiseStatementValue::UsingMessage(expr) => expr.span(), - RaiseStatementValue::Expr(expr) => expr.span(), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [ColumnOption::Null] -/// - [ColumnOption::NotNull] -/// - [ColumnOption::Comment] -/// - [ColumnOption::Unique]¨ -/// - [ColumnOption::DialectSpecific] -/// - [ColumnOption::Generated] -impl Spanned for ColumnOption { - fn span(&self) -> Span { - match self { - ColumnOption::Null => Span::empty(), - ColumnOption::NotNull => Span::empty(), - ColumnOption::Default(expr) => expr.span(), - ColumnOption::Materialized(expr) => expr.span(), - ColumnOption::Ephemeral(expr) => expr.as_ref().map_or(Span::empty(), |e| e.span()), - ColumnOption::Alias(expr) => expr.span(), - ColumnOption::Unique { .. } => Span::empty(), - ColumnOption::ForeignKey { - foreign_table, - referred_columns, - on_delete, - on_update, - characteristics, - } => union_spans( - core::iter::once(foreign_table.span()) - .chain(referred_columns.iter().map(|i| i.span)) - .chain(on_delete.iter().map(|i| i.span())) - .chain(on_update.iter().map(|i| i.span())) - .chain(characteristics.iter().map(|i| i.span())), - ), - ColumnOption::Check(expr) => expr.span(), - ColumnOption::DialectSpecific(_) => Span::empty(), - ColumnOption::CharacterSet(object_name) => object_name.span(), - ColumnOption::Collation(object_name) => object_name.span(), - ColumnOption::Comment(_) => Span::empty(), - ColumnOption::OnUpdate(expr) => expr.span(), - ColumnOption::Generated { .. } => Span::empty(), - ColumnOption::Options(vec) => union_spans(vec.iter().map(|i| i.span())), - ColumnOption::Identity(..) => Span::empty(), - ColumnOption::OnConflict(..) => Span::empty(), - ColumnOption::Policy(..) => Span::empty(), - ColumnOption::Tags(..) => Span::empty(), - ColumnOption::Srid(..) => Span::empty(), - } - } -} - -/// # missing span -impl Spanned for ReferentialAction { - fn span(&self) -> Span { - Span::empty() - } -} - -/// # missing span -impl Spanned for ConstraintCharacteristics { - fn span(&self) -> Span { - let ConstraintCharacteristics { - deferrable: _, // bool - initially: _, // enum - enforced: _, // bool - } = self; - - Span::empty() - } -} - -/// # partial span -/// -/// Missing spans: -/// - [AlterColumnOperation::SetNotNull] -/// - [AlterColumnOperation::DropNotNull] -/// - [AlterColumnOperation::DropDefault] -/// - [AlterColumnOperation::AddGenerated] -impl Spanned for AlterColumnOperation { - fn span(&self) -> Span { - match self { - AlterColumnOperation::SetNotNull => Span::empty(), - AlterColumnOperation::DropNotNull => Span::empty(), - AlterColumnOperation::SetDefault { value } => value.span(), - AlterColumnOperation::DropDefault => Span::empty(), - AlterColumnOperation::SetDataType { - data_type: _, - using, - had_set: _, - } => using.as_ref().map_or(Span::empty(), |u| u.span()), - AlterColumnOperation::AddGenerated { .. 
} => Span::empty(), - } - } -} - -impl Spanned for CopySource { - fn span(&self) -> Span { - match self { - CopySource::Table { - table_name, - columns, - } => union_spans( - core::iter::once(table_name.span()).chain(columns.iter().map(|i| i.span)), - ), - CopySource::Query(query) => query.span(), - } - } -} - -impl Spanned for Delete { - fn span(&self) -> Span { - let Delete { - tables, - from, - using, - selection, - returning, - order_by, - limit, - } = self; - - union_spans( - tables - .iter() - .map(|i| i.span()) - .chain(core::iter::once(from.span())) - .chain( - using - .iter() - .map(|u| union_spans(u.iter().map(|i| i.span()))), - ) - .chain(selection.iter().map(|i| i.span())) - .chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))) - .chain(order_by.iter().map(|i| i.span())) - .chain(limit.iter().map(|i| i.span())), - ) - } -} - -impl Spanned for FromTable { - fn span(&self) -> Span { - match self { - FromTable::WithFromKeyword(vec) => union_spans(vec.iter().map(|i| i.span())), - FromTable::WithoutKeyword(vec) => union_spans(vec.iter().map(|i| i.span())), - } - } -} - -impl Spanned for ViewColumnDef { - fn span(&self) -> Span { - let ViewColumnDef { - name, - data_type: _, // todo, DataType - options, - } = self; - - name.span.union_opt(&options.as_ref().map(|o| o.span())) - } -} - -impl Spanned for ColumnOptions { - fn span(&self) -> Span { - union_spans(self.as_slice().iter().map(|i| i.span())) - } -} - -impl Spanned for SqlOption { - fn span(&self) -> Span { - match self { - SqlOption::Clustered(table_options_clustered) => table_options_clustered.span(), - SqlOption::Ident(ident) => ident.span, - SqlOption::KeyValue { key, value } => key.span.union(&value.span()), - SqlOption::Partition { - column_name, - range_direction: _, - for_values, - } => union_spans( - core::iter::once(column_name.span).chain(for_values.iter().map(|i| i.span())), - ), - SqlOption::TableSpace(_) => Span::empty(), - SqlOption::Comment(_) => Span::empty(), - SqlOption::NamedParenthesizedList(NamedParenthesizedList { - key: name, - name: value, - values, - }) => union_spans(core::iter::once(name.span).chain(values.iter().map(|i| i.span))) - .union_opt(&value.as_ref().map(|i| i.span)), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [TableOptionsClustered::ColumnstoreIndex] -impl Spanned for TableOptionsClustered { - fn span(&self) -> Span { - match self { - TableOptionsClustered::ColumnstoreIndex => Span::empty(), - TableOptionsClustered::ColumnstoreIndexOrder(vec) => { - union_spans(vec.iter().map(|i| i.span)) - } - TableOptionsClustered::Index(vec) => union_spans(vec.iter().map(|i| i.span())), - } - } -} - -impl Spanned for ClusteredIndex { - fn span(&self) -> Span { - let ClusteredIndex { - name, - asc: _, // bool - } = self; - - name.span - } -} - -impl Spanned for CreateTableOptions { - fn span(&self) -> Span { - match self { - CreateTableOptions::None => Span::empty(), - CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())), - CreateTableOptions::Options(vec) => { - union_spans(vec.as_slice().iter().map(|i| i.span())) - } - CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())), - CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [AlterTableOperation::OwnerTo] -impl Spanned for AlterTableOperation { - fn span(&self) -> Span { - match self { - AlterTableOperation::AddConstraint { - constraint, - not_valid: _, - } => 
constraint.span(), - AlterTableOperation::AddColumn { - column_keyword: _, - if_not_exists: _, - column_def, - column_position: _, - } => column_def.span(), - AlterTableOperation::AddProjection { - if_not_exists: _, - name, - select, - } => name.span.union(&select.span()), - AlterTableOperation::DropProjection { if_exists: _, name } => name.span, - AlterTableOperation::MaterializeProjection { - if_exists: _, - name, - partition, - } => name.span.union_opt(&partition.as_ref().map(|i| i.span)), - AlterTableOperation::ClearProjection { - if_exists: _, - name, - partition, - } => name.span.union_opt(&partition.as_ref().map(|i| i.span)), - AlterTableOperation::DisableRowLevelSecurity => Span::empty(), - AlterTableOperation::DisableRule { name } => name.span, - AlterTableOperation::DisableTrigger { name } => name.span, - AlterTableOperation::DropConstraint { - if_exists: _, - name, - drop_behavior: _, - } => name.span, - AlterTableOperation::DropColumn { - has_column_keyword: _, - column_names, - if_exists: _, - drop_behavior: _, - } => union_spans(column_names.iter().map(|i| i.span)), - AlterTableOperation::AttachPartition { partition } => partition.span(), - AlterTableOperation::DetachPartition { partition } => partition.span(), - AlterTableOperation::FreezePartition { - partition, - with_name, - } => partition - .span() - .union_opt(&with_name.as_ref().map(|n| n.span)), - AlterTableOperation::UnfreezePartition { - partition, - with_name, - } => partition - .span() - .union_opt(&with_name.as_ref().map(|n| n.span)), - AlterTableOperation::DropPrimaryKey => Span::empty(), - AlterTableOperation::DropForeignKey { name } => name.span, - AlterTableOperation::DropIndex { name } => name.span, - AlterTableOperation::EnableAlwaysRule { name } => name.span, - AlterTableOperation::EnableAlwaysTrigger { name } => name.span, - AlterTableOperation::EnableReplicaRule { name } => name.span, - AlterTableOperation::EnableReplicaTrigger { name } => name.span, - AlterTableOperation::EnableRowLevelSecurity => Span::empty(), - AlterTableOperation::EnableRule { name } => name.span, - AlterTableOperation::EnableTrigger { name } => name.span, - AlterTableOperation::RenamePartitions { - old_partitions, - new_partitions, - } => union_spans( - old_partitions - .iter() - .map(|i| i.span()) - .chain(new_partitions.iter().map(|i| i.span())), - ), - AlterTableOperation::AddPartitions { - if_not_exists: _, - new_partitions, - } => union_spans(new_partitions.iter().map(|i| i.span())), - AlterTableOperation::DropPartitions { - partitions, - if_exists: _, - } => union_spans(partitions.iter().map(|i| i.span())), - AlterTableOperation::RenameColumn { - old_column_name, - new_column_name, - } => old_column_name.span.union(&new_column_name.span), - AlterTableOperation::RenameTable { table_name } => table_name.span(), - AlterTableOperation::ChangeColumn { - old_name, - new_name, - data_type: _, - options, - column_position: _, - } => union_spans( - core::iter::once(old_name.span) - .chain(core::iter::once(new_name.span)) - .chain(options.iter().map(|i| i.span())), - ), - AlterTableOperation::ModifyColumn { - col_name, - data_type: _, - options, - column_position: _, - } => { - union_spans(core::iter::once(col_name.span).chain(options.iter().map(|i| i.span()))) - } - AlterTableOperation::RenameConstraint { old_name, new_name } => { - old_name.span.union(&new_name.span) - } - AlterTableOperation::AlterColumn { column_name, op } => { - column_name.span.union(&op.span()) - } - AlterTableOperation::SwapWith { table_name } => 
table_name.span(), - AlterTableOperation::SetTblProperties { table_properties } => { - union_spans(table_properties.iter().map(|i| i.span())) - } - AlterTableOperation::OwnerTo { .. } => Span::empty(), - AlterTableOperation::ClusterBy { exprs } => union_spans(exprs.iter().map(|e| e.span())), - AlterTableOperation::DropClusteringKey => Span::empty(), - AlterTableOperation::SuspendRecluster => Span::empty(), - AlterTableOperation::ResumeRecluster => Span::empty(), - AlterTableOperation::Algorithm { .. } => Span::empty(), - AlterTableOperation::AutoIncrement { value, .. } => value.span(), - AlterTableOperation::Lock { .. } => Span::empty(), - AlterTableOperation::ReplicaIdentity { .. } => Span::empty(), - AlterTableOperation::ValidateConstraint { name } => name.span, - } - } -} - -impl Spanned for Partition { - fn span(&self) -> Span { - match self { - Partition::Identifier(ident) => ident.span, - Partition::Expr(expr) => expr.span(), - Partition::Part(expr) => expr.span(), - Partition::Partitions(vec) => union_spans(vec.iter().map(|i| i.span())), - } - } -} - -impl Spanned for ProjectionSelect { - fn span(&self) -> Span { - let ProjectionSelect { - projection, - order_by, - group_by, - } = self; - - union_spans( - projection - .iter() - .map(|i| i.span()) - .chain(order_by.iter().map(|i| i.span())) - .chain(group_by.iter().map(|i| i.span())), - ) - } -} - -/// # partial span -/// -/// Missing spans: -/// - [OrderByKind::All] -impl Spanned for OrderBy { - fn span(&self) -> Span { - match &self.kind { - OrderByKind::All(_) => Span::empty(), - OrderByKind::Expressions(exprs) => union_spans( - exprs - .iter() - .map(|i| i.span()) - .chain(self.interpolate.iter().map(|i| i.span())), - ), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [GroupByExpr::All] -impl Spanned for GroupByExpr { - fn span(&self) -> Span { - match self { - GroupByExpr::All(_) => Span::empty(), - GroupByExpr::Expressions(exprs, _modifiers) => { - union_spans(exprs.iter().map(|i| i.span())) - } - } - } -} - -impl Spanned for Interpolate { - fn span(&self) -> Span { - let Interpolate { exprs } = self; - - union_spans(exprs.iter().flat_map(|i| i.iter().map(|e| e.span()))) - } -} - -impl Spanned for InterpolateExpr { - fn span(&self) -> Span { - let InterpolateExpr { column, expr } = self; - - column.span.union_opt(&expr.as_ref().map(|e| e.span())) - } -} - -impl Spanned for AlterIndexOperation { - fn span(&self) -> Span { - match self { - AlterIndexOperation::RenameIndex { index_name } => index_name.span(), - } - } -} - -/// # partial span -/// -/// Missing spans:ever -/// - [Insert::insert_alias] -impl Spanned for Insert { - fn span(&self) -> Span { - let Insert { - or: _, // enum, sqlite specific - ignore: _, // bool - into: _, // bool - table, - table_alias, - columns, - overwrite: _, // bool - source, - partitioned, - after_columns, - has_table_keyword: _, // bool - on, - returning, - replace_into: _, // bool - priority: _, // todo, mysql specific - insert_alias: _, // todo, mysql specific - assignments, - settings: _, // todo, clickhouse specific - format_clause: _, // todo, clickhouse specific - } = self; - - union_spans( - core::iter::once(table.span()) - .chain(table_alias.as_ref().map(|i| i.span)) - .chain(columns.iter().map(|i| i.span)) - .chain(source.as_ref().map(|q| q.span())) - .chain(assignments.iter().map(|i| i.span())) - .chain(partitioned.iter().flat_map(|i| i.iter().map(|k| k.span()))) - .chain(after_columns.iter().map(|i| i.span)) - .chain(on.as_ref().map(|i| i.span())) - 
.chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))), - ) - } -} - -impl Spanned for OnInsert { - fn span(&self) -> Span { - match self { - OnInsert::DuplicateKeyUpdate(vec) => union_spans(vec.iter().map(|i| i.span())), - OnInsert::OnConflict(on_conflict) => on_conflict.span(), - } - } -} - -impl Spanned for OnConflict { - fn span(&self) -> Span { - let OnConflict { - conflict_target, - action, - } = self; - - action - .span() - .union_opt(&conflict_target.as_ref().map(|i| i.span())) - } -} - -impl Spanned for ConflictTarget { - fn span(&self) -> Span { - match self { - ConflictTarget::Columns(vec) => union_spans(vec.iter().map(|i| i.span)), - ConflictTarget::OnConstraint(object_name) => object_name.span(), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [OnConflictAction::DoNothing] -impl Spanned for OnConflictAction { - fn span(&self) -> Span { - match self { - OnConflictAction::DoNothing => Span::empty(), - OnConflictAction::DoUpdate(do_update) => do_update.span(), - } - } -} - -impl Spanned for DoUpdate { - fn span(&self) -> Span { - let DoUpdate { - assignments, - selection, - } = self; - - union_spans( - assignments - .iter() - .map(|i| i.span()) - .chain(selection.iter().map(|i| i.span())), - ) - } -} - -impl Spanned for Assignment { - fn span(&self) -> Span { - let Assignment { target, value } = self; - - target.span().union(&value.span()) - } -} - -impl Spanned for AssignmentTarget { - fn span(&self) -> Span { - match self { - AssignmentTarget::ColumnName(object_name) => object_name.span(), - AssignmentTarget::Tuple(vec) => union_spans(vec.iter().map(|i| i.span())), - } - } -} - -/// # partial span -/// -/// Most expressions are missing keywords in their spans. -/// f.e. `IS NULL ` reports as `::span`. -/// -/// Missing spans: -/// - [Expr::MatchAgainst] # MySQL specific -/// - [Expr::RLike] # MySQL specific -/// - [Expr::Struct] # BigQuery specific -/// - [Expr::Named] # BigQuery specific -/// - [Expr::Dictionary] # DuckDB specific -/// - [Expr::Map] # DuckDB specific -/// - [Expr::Lambda] -impl Spanned for Expr { - fn span(&self) -> Span { - match self { - Expr::Identifier(ident) => ident.span, - Expr::CompoundIdentifier(vec) => union_spans(vec.iter().map(|i| i.span)), - Expr::CompoundFieldAccess { root, access_chain } => { - union_spans(iter::once(root.span()).chain(access_chain.iter().map(|i| i.span()))) - } - Expr::IsFalse(expr) => expr.span(), - Expr::IsNotFalse(expr) => expr.span(), - Expr::IsTrue(expr) => expr.span(), - Expr::IsNotTrue(expr) => expr.span(), - Expr::IsNull(expr) => expr.span(), - Expr::IsNotNull(expr) => expr.span(), - Expr::IsUnknown(expr) => expr.span(), - Expr::IsNotUnknown(expr) => expr.span(), - Expr::IsDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()), - Expr::IsNotDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()), - Expr::InList { - expr, - list, - negated: _, - } => union_spans( - core::iter::once(expr.span()).chain(list.iter().map(|item| item.span())), - ), - Expr::InSubquery { - expr, - subquery, - negated: _, - } => expr.span().union(&subquery.span()), - Expr::InUnnest { - expr, - array_expr, - negated: _, - } => expr.span().union(&array_expr.span()), - Expr::Between { - expr, - negated: _, - low, - high, - } => expr.span().union(&low.span()).union(&high.span()), - - Expr::BinaryOp { left, op: _, right } => left.span().union(&right.span()), - Expr::Like { - negated: _, - expr, - pattern, - escape_char: _, - any: _, - } => expr.span().union(&pattern.span()), - Expr::ILike { - negated: _, - expr, - pattern, 
- escape_char: _, - any: _, - } => expr.span().union(&pattern.span()), - Expr::RLike { .. } => Span::empty(), - Expr::IsNormalized { - expr, - form: _, - negated: _, - } => expr.span(), - Expr::SimilarTo { - negated: _, - expr, - pattern, - escape_char: _, - } => expr.span().union(&pattern.span()), - Expr::Ceil { expr, field: _ } => expr.span(), - Expr::Floor { expr, field: _ } => expr.span(), - Expr::Position { expr, r#in } => expr.span().union(&r#in.span()), - Expr::Overlay { - expr, - overlay_what, - overlay_from, - overlay_for, - } => expr - .span() - .union(&overlay_what.span()) - .union(&overlay_from.span()) - .union_opt(&overlay_for.as_ref().map(|i| i.span())), - Expr::Collate { expr, collation } => expr - .span() - .union(&union_spans(collation.0.iter().map(|i| i.span()))), - Expr::Nested(expr) => expr.span(), - Expr::Value(value) => value.span(), - Expr::TypedString { value, .. } => value.span(), - Expr::Function(function) => function.span(), - Expr::GroupingSets(vec) => { - union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))) - } - Expr::Cube(vec) => union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))), - Expr::Rollup(vec) => union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))), - Expr::Tuple(vec) => union_spans(vec.iter().map(|i| i.span())), - Expr::Array(array) => array.span(), - Expr::MatchAgainst { .. } => Span::empty(), - Expr::JsonAccess { value, path } => value.span().union(&path.span()), - Expr::AnyOp { - left, - compare_op: _, - right, - is_some: _, - } => left.span().union(&right.span()), - Expr::AllOp { - left, - compare_op: _, - right, - } => left.span().union(&right.span()), - Expr::UnaryOp { op: _, expr } => expr.span(), - Expr::Convert { - expr, - data_type: _, - charset, - target_before_value: _, - styles, - is_try: _, - } => union_spans( - core::iter::once(expr.span()) - .chain(charset.as_ref().map(|i| i.span())) - .chain(styles.iter().map(|i| i.span())), - ), - Expr::Cast { - kind: _, - expr, - data_type: _, - format: _, - } => expr.span(), - Expr::AtTimeZone { - timestamp, - time_zone, - } => timestamp.span().union(&time_zone.span()), - Expr::Extract { - field: _, - syntax: _, - expr, - } => expr.span(), - Expr::Substring { - expr, - substring_from, - substring_for, - special: _, - shorthand: _, - } => union_spans( - core::iter::once(expr.span()) - .chain(substring_from.as_ref().map(|i| i.span())) - .chain(substring_for.as_ref().map(|i| i.span())), - ), - Expr::Trim { - expr, - trim_where: _, - trim_what, - trim_characters, - } => union_spans( - core::iter::once(expr.span()) - .chain(trim_what.as_ref().map(|i| i.span())) - .chain( - trim_characters - .as_ref() - .map(|items| union_spans(items.iter().map(|i| i.span()))), - ), - ), - Expr::Prefixed { value, .. } => value.span(), - Expr::Case { - case_token, - end_token, - operand, - conditions, - else_result, - } => union_spans( - iter::once(case_token.0.span) - .chain( - operand - .as_ref() - .map(|i| i.span()) - .into_iter() - .chain(conditions.iter().flat_map(|case_when| { - [case_when.condition.span(), case_when.result.span()] - })) - .chain(else_result.as_ref().map(|i| i.span())), - ) - .chain(iter::once(end_token.0.span)), - ), - Expr::Exists { subquery, .. } => subquery.span(), - Expr::Subquery(query) => query.span(), - Expr::Struct { .. } => Span::empty(), - Expr::Named { .. 
} => Span::empty(), - Expr::Dictionary(_) => Span::empty(), - Expr::Map(_) => Span::empty(), - Expr::Interval(interval) => interval.value.span(), - Expr::Wildcard(token) => token.0.span, - Expr::QualifiedWildcard(object_name, token) => union_spans( - object_name - .0 - .iter() - .map(|i| i.span()) - .chain(iter::once(token.0.span)), - ), - Expr::OuterJoin(expr) => expr.span(), - Expr::Prior(expr) => expr.span(), - Expr::Lambda(_) => Span::empty(), - Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()), - } - } -} - -impl Spanned for Subscript { - fn span(&self) -> Span { - match self { - Subscript::Index { index } => index.span(), - Subscript::Slice { - lower_bound, - upper_bound, - stride, - } => union_spans( - [ - lower_bound.as_ref().map(|i| i.span()), - upper_bound.as_ref().map(|i| i.span()), - stride.as_ref().map(|i| i.span()), - ] - .into_iter() - .flatten(), - ), - } - } -} - -impl Spanned for AccessExpr { - fn span(&self) -> Span { - match self { - AccessExpr::Dot(ident) => ident.span(), - AccessExpr::Subscript(subscript) => subscript.span(), - } - } -} - -impl Spanned for ObjectName { - fn span(&self) -> Span { - let ObjectName(segments) = self; - - union_spans(segments.iter().map(|i| i.span())) - } -} - -impl Spanned for ObjectNamePart { - fn span(&self) -> Span { - match self { - ObjectNamePart::Identifier(ident) => ident.span, - ObjectNamePart::Function(func) => func - .name - .span - .union(&union_spans(func.args.iter().map(|i| i.span()))), - } - } -} - -impl Spanned for Array { - fn span(&self) -> Span { - let Array { - elem, - named: _, // bool - } = self; - - union_spans(elem.iter().map(|i| i.span())) - } -} - -impl Spanned for Function { - fn span(&self) -> Span { - let Function { - name, - uses_odbc_syntax: _, - parameters, - args, - filter, - null_treatment: _, // enum - over: _, // todo - within_group, - } = self; - - union_spans( - name.0 - .iter() - .map(|i| i.span()) - .chain(iter::once(args.span())) - .chain(iter::once(parameters.span())) - .chain(filter.iter().map(|i| i.span())) - .chain(within_group.iter().map(|i| i.span())), - ) - } -} - -/// # partial span -/// -/// The span of [FunctionArguments::None] is empty. 
-impl Spanned for FunctionArguments { - fn span(&self) -> Span { - match self { - FunctionArguments::None => Span::empty(), - FunctionArguments::Subquery(query) => query.span(), - FunctionArguments::List(list) => list.span(), - } - } -} - -impl Spanned for FunctionArgumentList { - fn span(&self) -> Span { - let FunctionArgumentList { - duplicate_treatment: _, // enum - args, - clauses, - } = self; - - union_spans( - // # todo: duplicate-treatment span - args.iter() - .map(|i| i.span()) - .chain(clauses.iter().map(|i| i.span())), - ) - } -} - -impl Spanned for FunctionArgumentClause { - fn span(&self) -> Span { - match self { - FunctionArgumentClause::IgnoreOrRespectNulls(_) => Span::empty(), - FunctionArgumentClause::OrderBy(vec) => union_spans(vec.iter().map(|i| i.expr.span())), - FunctionArgumentClause::Limit(expr) => expr.span(), - FunctionArgumentClause::OnOverflow(_) => Span::empty(), - FunctionArgumentClause::Having(HavingBound(_kind, expr)) => expr.span(), - FunctionArgumentClause::Separator(value) => value.span(), - FunctionArgumentClause::JsonNullClause(_) => Span::empty(), - } - } -} - -/// # partial span -/// -/// see Spanned impl for JsonPathElem for more information -impl Spanned for JsonPath { - fn span(&self) -> Span { - let JsonPath { path } = self; - - union_spans(path.iter().map(|i| i.span())) - } -} - -/// # partial span -/// -/// Missing spans: -/// - [JsonPathElem::Dot] -impl Spanned for JsonPathElem { - fn span(&self) -> Span { - match self { - JsonPathElem::Dot { .. } => Span::empty(), - JsonPathElem::Bracket { key } => key.span(), - } - } -} - -impl Spanned for SelectItemQualifiedWildcardKind { - fn span(&self) -> Span { - match self { - SelectItemQualifiedWildcardKind::ObjectName(object_name) => object_name.span(), - SelectItemQualifiedWildcardKind::Expr(expr) => expr.span(), - } - } -} - -impl Spanned for SelectItem { - fn span(&self) -> Span { - match self { - SelectItem::UnnamedExpr(expr) => expr.span(), - SelectItem::ExprWithAlias { expr, alias } => expr.span().union(&alias.span), - SelectItem::QualifiedWildcard(kind, wildcard_additional_options) => union_spans( - [kind.span()] - .into_iter() - .chain(iter::once(wildcard_additional_options.span())), - ), - SelectItem::Wildcard(wildcard_additional_options) => wildcard_additional_options.span(), - } - } -} - -impl Spanned for WildcardAdditionalOptions { - fn span(&self) -> Span { - let WildcardAdditionalOptions { - wildcard_token, - opt_ilike, - opt_exclude, - opt_except, - opt_replace, - opt_rename, - } = self; - - union_spans( - core::iter::once(wildcard_token.0.span) - .chain(opt_ilike.as_ref().map(|i| i.span())) - .chain(opt_exclude.as_ref().map(|i| i.span())) - .chain(opt_rename.as_ref().map(|i| i.span())) - .chain(opt_replace.as_ref().map(|i| i.span())) - .chain(opt_except.as_ref().map(|i| i.span())), - ) - } -} - -/// # missing span -impl Spanned for IlikeSelectItem { - fn span(&self) -> Span { - Span::empty() - } -} - -impl Spanned for ExcludeSelectItem { - fn span(&self) -> Span { - match self { - ExcludeSelectItem::Single(ident) => ident.span, - ExcludeSelectItem::Multiple(vec) => union_spans(vec.iter().map(|i| i.span)), - } - } -} - -impl Spanned for RenameSelectItem { - fn span(&self) -> Span { - match self { - RenameSelectItem::Single(ident) => ident.ident.span.union(&ident.alias.span), - RenameSelectItem::Multiple(vec) => { - union_spans(vec.iter().map(|i| i.ident.span.union(&i.alias.span))) - } - } - } -} - -impl Spanned for ExceptSelectItem { - fn span(&self) -> Span { - let ExceptSelectItem { - 
first_element, - additional_elements, - } = self; - - union_spans( - iter::once(first_element.span).chain(additional_elements.iter().map(|i| i.span)), - ) - } -} - -impl Spanned for ReplaceSelectItem { - fn span(&self) -> Span { - let ReplaceSelectItem { items } = self; - - union_spans(items.iter().map(|i| i.span())) - } -} - -impl Spanned for ReplaceSelectElement { - fn span(&self) -> Span { - let ReplaceSelectElement { - expr, - column_name, - as_keyword: _, // bool - } = self; - - expr.span().union(&column_name.span) - } -} - -/// # partial span -/// -/// Missing spans: -/// - [TableFactor::JsonTable] -impl Spanned for TableFactor { - fn span(&self) -> Span { - match self { - TableFactor::Table { - name, - alias, - args: _, - with_hints: _, - version: _, - with_ordinality: _, - partitions: _, - json_path: _, - sample: _, - index_hints: _, - } => union_spans( - name.0 - .iter() - .map(|i| i.span()) - .chain(alias.as_ref().map(|alias| { - union_spans( - iter::once(alias.name.span) - .chain(alias.columns.iter().map(|i| i.span())), - ) - })), - ), - TableFactor::Derived { - lateral: _, - subquery, - alias, - } => subquery - .span() - .union_opt(&alias.as_ref().map(|alias| alias.span())), - TableFactor::TableFunction { expr, alias } => expr - .span() - .union_opt(&alias.as_ref().map(|alias| alias.span())), - TableFactor::UNNEST { - alias, - with_offset: _, - with_offset_alias, - array_exprs, - with_ordinality: _, - } => union_spans( - alias - .iter() - .map(|i| i.span()) - .chain(array_exprs.iter().map(|i| i.span())) - .chain(with_offset_alias.as_ref().map(|i| i.span)), - ), - TableFactor::NestedJoin { - table_with_joins, - alias, - } => table_with_joins - .span() - .union_opt(&alias.as_ref().map(|alias| alias.span())), - TableFactor::Function { - lateral: _, - name, - args, - alias, - } => union_spans( - name.0 - .iter() - .map(|i| i.span()) - .chain(args.iter().map(|i| i.span())) - .chain(alias.as_ref().map(|alias| alias.span())), - ), - TableFactor::JsonTable { .. } => Span::empty(), - TableFactor::XmlTable { .. } => Span::empty(), - TableFactor::Pivot { - table, - aggregate_functions, - value_column, - value_source, - default_on_null, - alias, - } => union_spans( - core::iter::once(table.span()) - .chain(aggregate_functions.iter().map(|i| i.span())) - .chain(value_column.iter().map(|i| i.span)) - .chain(core::iter::once(value_source.span())) - .chain(default_on_null.as_ref().map(|i| i.span())) - .chain(alias.as_ref().map(|i| i.span())), - ), - TableFactor::Unpivot { - table, - value, - null_inclusion: _, - name, - columns, - alias, - } => union_spans( - core::iter::once(table.span()) - .chain(core::iter::once(value.span)) - .chain(core::iter::once(name.span)) - .chain(columns.iter().map(|i| i.span)) - .chain(alias.as_ref().map(|alias| alias.span())), - ), - TableFactor::MatchRecognize { - table, - partition_by, - order_by, - measures, - rows_per_match: _, - after_match_skip: _, - pattern, - symbols, - alias, - } => union_spans( - core::iter::once(table.span()) - .chain(partition_by.iter().map(|i| i.span())) - .chain(order_by.iter().map(|i| i.span())) - .chain(measures.iter().map(|i| i.span())) - .chain(core::iter::once(pattern.span())) - .chain(symbols.iter().map(|i| i.span())) - .chain(alias.as_ref().map(|i| i.span())), - ), - TableFactor::OpenJsonTable { .. 
} => Span::empty(), - } - } -} - -impl Spanned for PivotValueSource { - fn span(&self) -> Span { - match self { - PivotValueSource::List(vec) => union_spans(vec.iter().map(|i| i.span())), - PivotValueSource::Any(vec) => union_spans(vec.iter().map(|i| i.span())), - PivotValueSource::Subquery(query) => query.span(), - } - } -} - -impl Spanned for ExprWithAlias { - fn span(&self) -> Span { - let ExprWithAlias { expr, alias } = self; - - expr.span().union_opt(&alias.as_ref().map(|i| i.span)) - } -} - -/// # missing span -impl Spanned for MatchRecognizePattern { - fn span(&self) -> Span { - Span::empty() - } -} - -impl Spanned for SymbolDefinition { - fn span(&self) -> Span { - let SymbolDefinition { symbol, definition } = self; - - symbol.span.union(&definition.span()) - } -} - -impl Spanned for Measure { - fn span(&self) -> Span { - let Measure { expr, alias } = self; - - expr.span().union(&alias.span) - } -} - -impl Spanned for OrderByExpr { - fn span(&self) -> Span { - let OrderByExpr { - expr, - options: _, - with_fill, - } = self; - - expr.span().union_opt(&with_fill.as_ref().map(|f| f.span())) - } -} - -impl Spanned for WithFill { - fn span(&self) -> Span { - let WithFill { from, to, step } = self; - - union_spans( - from.iter() - .map(|f| f.span()) - .chain(to.iter().map(|t| t.span())) - .chain(step.iter().map(|s| s.span())), - ) - } -} - -impl Spanned for FunctionArg { - fn span(&self) -> Span { - match self { - FunctionArg::Named { - name, - arg, - operator: _, - } => name.span.union(&arg.span()), - FunctionArg::Unnamed(arg) => arg.span(), - FunctionArg::ExprNamed { - name, - arg, - operator: _, - } => name.span().union(&arg.span()), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [FunctionArgExpr::Wildcard] -impl Spanned for FunctionArgExpr { - fn span(&self) -> Span { - match self { - FunctionArgExpr::Expr(expr) => expr.span(), - FunctionArgExpr::QualifiedWildcard(object_name) => { - union_spans(object_name.0.iter().map(|i| i.span())) - } - FunctionArgExpr::Wildcard => Span::empty(), - } - } -} - -impl Spanned for TableAlias { - fn span(&self) -> Span { - let TableAlias { name, columns } = self; - - union_spans(iter::once(name.span).chain(columns.iter().map(|i| i.span()))) - } -} - -impl Spanned for TableAliasColumnDef { - fn span(&self) -> Span { - let TableAliasColumnDef { name, data_type: _ } = self; - - name.span - } -} - -impl Spanned for ValueWithSpan { - fn span(&self) -> Span { - self.span - } -} - -/// The span is stored in the `ValueWrapper` struct -impl Spanned for Value { - fn span(&self) -> Span { - Span::empty() // # todo: Value needs to store spans before this is possible - } -} - -impl Spanned for Join { - fn span(&self) -> Span { - let Join { - relation, - global: _, // bool - join_operator, - } = self; - - relation.span().union(&join_operator.span()) - } -} - -/// # partial span -/// -/// Missing spans: -/// - [JoinOperator::CrossJoin] -/// - [JoinOperator::CrossApply] -/// - [JoinOperator::OuterApply] -impl Spanned for JoinOperator { - fn span(&self) -> Span { - match self { - JoinOperator::Join(join_constraint) => join_constraint.span(), - JoinOperator::Inner(join_constraint) => join_constraint.span(), - JoinOperator::Left(join_constraint) => join_constraint.span(), - JoinOperator::LeftOuter(join_constraint) => join_constraint.span(), - JoinOperator::Right(join_constraint) => join_constraint.span(), - JoinOperator::RightOuter(join_constraint) => join_constraint.span(), - JoinOperator::FullOuter(join_constraint) => join_constraint.span(), - 
JoinOperator::CrossJoin => Span::empty(), - JoinOperator::LeftSemi(join_constraint) => join_constraint.span(), - JoinOperator::RightSemi(join_constraint) => join_constraint.span(), - JoinOperator::LeftAnti(join_constraint) => join_constraint.span(), - JoinOperator::RightAnti(join_constraint) => join_constraint.span(), - JoinOperator::CrossApply => Span::empty(), - JoinOperator::OuterApply => Span::empty(), - JoinOperator::AsOf { - match_condition, - constraint, - } => match_condition.span().union(&constraint.span()), - JoinOperator::Anti(join_constraint) => join_constraint.span(), - JoinOperator::Semi(join_constraint) => join_constraint.span(), - JoinOperator::StraightJoin(join_constraint) => join_constraint.span(), - } - } -} - -/// # partial span -/// -/// Missing spans: -/// - [JoinConstraint::Natural] -/// - [JoinConstraint::None] -impl Spanned for JoinConstraint { - fn span(&self) -> Span { - match self { - JoinConstraint::On(expr) => expr.span(), - JoinConstraint::Using(vec) => union_spans(vec.iter().map(|i| i.span())), - JoinConstraint::Natural => Span::empty(), - JoinConstraint::None => Span::empty(), - } - } -} - -impl Spanned for TableWithJoins { - fn span(&self) -> Span { - let TableWithJoins { relation, joins } = self; - - union_spans(core::iter::once(relation.span()).chain(joins.iter().map(|item| item.span()))) - } -} - -impl Spanned for Select { - fn span(&self) -> Span { - let Select { - select_token, - distinct: _, // todo - top: _, // todo, mysql specific - projection, - exclude: _, - into, - from, - lateral_views, - prewhere, - selection, - group_by, - cluster_by, - distribute_by, - sort_by, - having, - named_window, - qualify, - window_before_qualify: _, // bool - value_table_mode: _, // todo, BigQuery specific - connect_by, - top_before_distinct: _, - flavor: _, - } = self; - - union_spans( - core::iter::once(select_token.0.span) - .chain(projection.iter().map(|item| item.span())) - .chain(into.iter().map(|item| item.span())) - .chain(from.iter().map(|item| item.span())) - .chain(lateral_views.iter().map(|item| item.span())) - .chain(prewhere.iter().map(|item| item.span())) - .chain(selection.iter().map(|item| item.span())) - .chain(core::iter::once(group_by.span())) - .chain(cluster_by.iter().map(|item| item.span())) - .chain(distribute_by.iter().map(|item| item.span())) - .chain(sort_by.iter().map(|item| item.span())) - .chain(having.iter().map(|item| item.span())) - .chain(named_window.iter().map(|item| item.span())) - .chain(qualify.iter().map(|item| item.span())) - .chain(connect_by.iter().map(|item| item.span())), - ) - } -} - -impl Spanned for ConnectBy { - fn span(&self) -> Span { - let ConnectBy { - condition, - relationships, - } = self; - - union_spans( - core::iter::once(condition.span()).chain(relationships.iter().map(|item| item.span())), - ) - } -} - -impl Spanned for NamedWindowDefinition { - fn span(&self) -> Span { - let NamedWindowDefinition( - ident, - _, // todo: NamedWindowExpr - ) = self; - - ident.span - } -} - -impl Spanned for LateralView { - fn span(&self) -> Span { - let LateralView { - lateral_view, - lateral_view_name, - lateral_col_alias, - outer: _, // bool - } = self; - - union_spans( - core::iter::once(lateral_view.span()) - .chain(core::iter::once(lateral_view_name.span())) - .chain(lateral_col_alias.iter().map(|i| i.span)), - ) - } -} - -impl Spanned for SelectInto { - fn span(&self) -> Span { - let SelectInto { - temporary: _, // bool - unlogged: _, // bool - table: _, // bool - name, - } = self; - - name.span() - } -} - -impl 
Spanned for UpdateTableFromKind { - fn span(&self) -> Span { - let from = match self { - UpdateTableFromKind::BeforeSet(from) => from, - UpdateTableFromKind::AfterSet(from) => from, - }; - union_spans(from.iter().map(|t| t.span())) - } -} - -impl Spanned for TableObject { - fn span(&self) -> Span { - match self { - TableObject::TableName(ObjectName(segments)) => { - union_spans(segments.iter().map(|i| i.span())) - } - TableObject::TableFunction(func) => func.span(), - } - } -} - -impl Spanned for BeginEndStatements { - fn span(&self) -> Span { - let BeginEndStatements { - begin_token, - statements, - end_token, - } = self; - union_spans( - core::iter::once(begin_token.0.span) - .chain(statements.iter().map(|i| i.span())) - .chain(core::iter::once(end_token.0.span)), - ) - } -} - -impl Spanned for OpenStatement { - fn span(&self) -> Span { - let OpenStatement { cursor_name } = self; - cursor_name.span - } -} - -#[cfg(test)] -pub mod tests { - use crate::dialect::{Dialect, GenericDialect, SnowflakeDialect}; - use crate::parser::Parser; - use crate::tokenizer::Span; - - use super::*; - - struct SpanTest<'a>(Parser<'a>, &'a str); - - impl<'a> SpanTest<'a> { - fn new(dialect: &'a dyn Dialect, sql: &'a str) -> Self { - Self(Parser::new(dialect).try_with_sql(sql).unwrap(), sql) - } - - // get the subsection of the source string that corresponds to the span - // only works on single-line strings - fn get_source(&self, span: Span) -> &'a str { - // lines in spans are 1-indexed - &self.1[(span.start.column as usize - 1)..(span.end.column - 1) as usize] - } - } - - #[test] - fn test_join() { - let dialect = &GenericDialect; - let mut test = SpanTest::new( - dialect, - "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id", - ); - - let query = test.0.parse_select().unwrap(); - let select_span = query.span(); - - assert_eq!( - test.get_source(select_span), - "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id" - ); - - let join_span = query.from[0].joins[0].span(); - - // 'LEFT JOIN' missing - assert_eq!( - test.get_source(join_span), - "companies ON users.company_id = companies.id" - ); - } - - #[test] - pub fn test_union() { - let dialect = &GenericDialect; - let mut test = SpanTest::new( - dialect, - "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source", - ); - - let query = test.0.parse_query().unwrap(); - let select_span = query.span(); - - assert_eq!( - test.get_source(select_span), - "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source" - ); - } - - #[test] - pub fn test_subquery() { - let dialect = &GenericDialect; - let mut test = SpanTest::new( - dialect, - "SELECT a FROM (SELECT a FROM postgres.public.source) AS b", - ); - - let query = test.0.parse_select().unwrap(); - let select_span = query.span(); - - assert_eq!( - test.get_source(select_span), - "SELECT a FROM (SELECT a FROM postgres.public.source) AS b" - ); - - let subquery_span = query.from[0].span(); - - // left paren missing - assert_eq!( - test.get_source(subquery_span), - "SELECT a FROM postgres.public.source) AS b" - ); - } - - #[test] - pub fn test_cte() { - let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); - - let query = test.0.parse_query().unwrap(); - - let select_span = query.span(); - - 
assert_eq!(test.get_source(select_span), "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); - } - - #[test] - pub fn test_snowflake_lateral_flatten() { - let dialect = &SnowflakeDialect; - let mut test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); - - let query = test.0.parse_select().unwrap(); - - let select_span = query.span(); - - assert_eq!(test.get_source(select_span), "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); - } - - #[test] - pub fn test_wildcard_from_cte() { - let dialect = &GenericDialect; - let mut test = SpanTest::new( - dialect, - "WITH cte AS (SELECT a FROM postgres.public.source) SELECT cte.* FROM cte", - ); - - let query = test.0.parse_query().unwrap(); - let cte_span = query.clone().with.unwrap().cte_tables[0].span(); - let cte_query_span = query.clone().with.unwrap().cte_tables[0].query.span(); - let body_span = query.body.span(); - - // the WITH keyboard is part of the query - assert_eq!( - test.get_source(cte_span), - "cte AS (SELECT a FROM postgres.public.source)" - ); - assert_eq!( - test.get_source(cte_query_span), - "SELECT a FROM postgres.public.source" - ); - - assert_eq!(test.get_source(body_span), "SELECT cte.* FROM cte"); - } - - #[test] - fn test_case_expr_span() { - let dialect = &GenericDialect; - let mut test = SpanTest::new(dialect, "CASE 1 WHEN 2 THEN 3 ELSE 4 END"); - let expr = test.0.parse_expr().unwrap(); - let expr_span = expr.span(); - assert_eq!( - test.get_source(expr_span), - "CASE 1 WHEN 2 THEN 3 ELSE 4 END" - ); - } -} diff --git a/src/ast/trigger.rs b/src/ast/trigger.rs index 2c64e423..cf1c8c46 100644 --- a/src/ast/trigger.rs +++ b/src/ast/trigger.rs @@ -110,7 +110,6 @@ impl fmt::Display for TriggerEvent { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum TriggerPeriod { - For, After, Before, InsteadOf, @@ -119,7 +118,6 @@ pub enum TriggerPeriod { impl fmt::Display for TriggerPeriod { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - TriggerPeriod::For => write!(f, "FOR"), TriggerPeriod::After => write!(f, "AFTER"), TriggerPeriod::Before => write!(f, "BEFORE"), TriggerPeriod::InsteadOf => write!(f, "INSTEAD OF"), diff --git a/src/ast/value.rs b/src/ast/value.rs index fdfa6a67..30d956a0 100644 --- a/src/ast/value.rs +++ b/src/ast/value.rs @@ -26,96 +26,14 @@ use bigdecimal::BigDecimal; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; -use crate::{ast::Ident, tokenizer::Span}; +use crate::ast::Ident; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -/// Wraps a primitive SQL [`Value`] with its [`Span`] location -/// -/// # Example: create a `ValueWithSpan` from a `Value` -/// ``` -/// # use sqlparser::ast::{Value, ValueWithSpan}; -/// # use sqlparser::tokenizer::{Location, Span}; -/// let value = Value::SingleQuotedString(String::from("endpoint")); -/// // from line 1, column 1 to line 1, column 7 -/// let span = Span::new(Location::new(1, 1), Location::new(1, 7)); -/// let value_with_span = value.with_span(span); -/// ``` -/// -/// # Example: create a `ValueWithSpan` from a `Value` with an empty span -/// -/// You can call [`Value::with_empty_span`] to create a `ValueWithSpan` with an empty span 
-/// ``` -/// # use sqlparser::ast::{Value, ValueWithSpan}; -/// # use sqlparser::tokenizer::{Location, Span}; -/// let value = Value::SingleQuotedString(String::from("endpoint")); -/// let value_with_span = value.with_empty_span(); -/// assert_eq!(value_with_span.span, Span::empty()); -/// ``` -/// -/// You can also use the [`From`] trait to convert `ValueWithSpan` to/from `Value`s -/// ``` -/// # use sqlparser::ast::{Value, ValueWithSpan}; -/// # use sqlparser::tokenizer::{Location, Span}; -/// let value = Value::SingleQuotedString(String::from("endpoint")); -/// // converting `Value` to `ValueWithSpan` results in an empty span -/// let value_with_span: ValueWithSpan = value.into(); -/// assert_eq!(value_with_span.span, Span::empty()); -/// // convert back to `Value` -/// let value: Value = value_with_span.into(); -/// ``` -#[derive(Debug, Clone, Eq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ValueWithSpan { - pub value: Value, - pub span: Span, -} - -impl PartialEq for ValueWithSpan { - fn eq(&self, other: &Self) -> bool { - self.value == other.value - } -} - -impl Ord for ValueWithSpan { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.value.cmp(&other.value) - } -} - -impl PartialOrd for ValueWithSpan { - fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { - Some(Ord::cmp(self, other)) - } -} - -impl core::hash::Hash for ValueWithSpan { - fn hash<H: core::hash::Hasher>(&self, state: &mut H) { - self.value.hash(state); - } -} - -impl From<Value> for ValueWithSpan { - fn from(value: Value) -> Self { - value.with_empty_span() - } -} - -impl From<ValueWithSpan> for Value { - fn from(value: ValueWithSpan) -> Self { - value.value - } -} - /// Primitive SQL values such as number and string #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr( - feature = "visitor", - derive(Visit, VisitMut), - visit(with = "visit_value") -)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum Value { /// Numeric literal #[cfg(not(feature = "bigdecimal"))] @@ -179,53 +97,6 @@ pub enum Value { Placeholder(String), } -impl ValueWithSpan { - /// If the underlying literal is a string, regardless of quote style, returns the associated string value - pub fn into_string(self) -> Option<String> { - self.value.into_string() - } -} - -impl Value { - /// If the underlying literal is a string, regardless of quote style, returns the associated string value - pub fn into_string(self) -> Option<String> { - match self { - Value::SingleQuotedString(s) - | Value::DoubleQuotedString(s) - | Value::TripleSingleQuotedString(s) - | Value::TripleDoubleQuotedString(s) - | Value::SingleQuotedByteStringLiteral(s) - | Value::DoubleQuotedByteStringLiteral(s) - | Value::TripleSingleQuotedByteStringLiteral(s) - | Value::TripleDoubleQuotedByteStringLiteral(s) - | Value::SingleQuotedRawStringLiteral(s) - | Value::DoubleQuotedRawStringLiteral(s) - | Value::TripleSingleQuotedRawStringLiteral(s) - | Value::TripleDoubleQuotedRawStringLiteral(s) - | Value::EscapedStringLiteral(s) - | Value::UnicodeStringLiteral(s) - | Value::NationalStringLiteral(s) - | Value::HexStringLiteral(s) => Some(s), - Value::DollarQuotedString(s) => Some(s.value), - _ => None, - } - } - - pub fn with_span(self, span: Span) -> ValueWithSpan { - ValueWithSpan { value: self, span } - } - - pub fn with_empty_span(self) -> ValueWithSpan { - self.with_span(Span::empty()) - } -} - -impl fmt::Display for ValueWithSpan { - fn fmt(&self, f:
&mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.value) - } -} - impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { @@ -284,9 +155,7 @@ impl fmt::Display for DollarQuotedString { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum DateTimeField { Year, - Years, Month, - Months, /// Week optionally followed by a WEEKDAY. /// /// ```sql @@ -295,19 +164,14 @@ pub enum DateTimeField { /// /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#extract) Week(Option), - Weeks, Day, DayOfWeek, DayOfYear, - Days, Date, Datetime, Hour, - Hours, Minute, - Minutes, Second, - Seconds, Century, Decade, Dow, @@ -346,9 +210,7 @@ impl fmt::Display for DateTimeField { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { DateTimeField::Year => write!(f, "YEAR"), - DateTimeField::Years => write!(f, "YEARS"), DateTimeField::Month => write!(f, "MONTH"), - DateTimeField::Months => write!(f, "MONTHS"), DateTimeField::Week(week_day) => { write!(f, "WEEK")?; if let Some(week_day) = week_day { @@ -356,19 +218,14 @@ impl fmt::Display for DateTimeField { } Ok(()) } - DateTimeField::Weeks => write!(f, "WEEKS"), DateTimeField::Day => write!(f, "DAY"), DateTimeField::DayOfWeek => write!(f, "DAYOFWEEK"), DateTimeField::DayOfYear => write!(f, "DAYOFYEAR"), - DateTimeField::Days => write!(f, "DAYS"), DateTimeField::Date => write!(f, "DATE"), DateTimeField::Datetime => write!(f, "DATETIME"), DateTimeField::Hour => write!(f, "HOUR"), - DateTimeField::Hours => write!(f, "HOURS"), DateTimeField::Minute => write!(f, "MINUTE"), - DateTimeField::Minutes => write!(f, "MINUTES"), DateTimeField::Second => write!(f, "SECOND"), - DateTimeField::Seconds => write!(f, "SECONDS"), DateTimeField::Century => write!(f, "CENTURY"), DateTimeField::Decade => write!(f, "DECADE"), DateTimeField::Dow => write!(f, "DOW"), @@ -399,41 +256,12 @@ impl fmt::Display for DateTimeField { } } -#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -/// The Unicode Standard defines four normalization forms, which are intended to eliminate -/// certain distinctions between visually or functionally identical characters. -/// -/// See [Unicode Normalization Forms](https://unicode.org/reports/tr15/) for details. -pub enum NormalizationForm { - /// Canonical Decomposition, followed by Canonical Composition. - NFC, - /// Canonical Decomposition. - NFD, - /// Compatibility Decomposition, followed by Canonical Composition. - NFKC, - /// Compatibility Decomposition. - NFKD, -} - -impl fmt::Display for NormalizationForm { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - NormalizationForm::NFC => write!(f, "NFC"), - NormalizationForm::NFD => write!(f, "NFD"), - NormalizationForm::NFKC => write!(f, "NFKC"), - NormalizationForm::NFKD => write!(f, "NFKD"), - } - } -} - pub struct EscapeQuotedString<'a> { string: &'a str, quote: char, } -impl fmt::Display for EscapeQuotedString<'_> { +impl<'a> fmt::Display for EscapeQuotedString<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // EscapeQuotedString doesn't know which mode of escape was // chosen by the user. 
So this code must to correctly display @@ -455,38 +283,30 @@ impl fmt::Display for EscapeQuotedString<'_> { // | `"A\"B\"A"` | default | `DoubleQuotedString(String::from("A\"B\"A"))` | `"A""B""A"` | let quote = self.quote; let mut previous_char = char::default(); - let mut start_idx = 0; - let mut peekable_chars = self.string.char_indices().peekable(); - while let Some(&(idx, ch)) = peekable_chars.peek() { + let mut peekable_chars = self.string.chars().peekable(); + while let Some(&ch) = peekable_chars.peek() { match ch { char if char == quote => { if previous_char == '\\' { - // the quote is already escaped with a backslash, skip + write!(f, "{char}")?; peekable_chars.next(); continue; } peekable_chars.next(); - match peekable_chars.peek() { - Some((_, c)) if *c == quote => { - // the quote is already escaped with another quote, skip - peekable_chars.next(); - } - _ => { - // The quote is not escaped. - // Including idx in the range, so the quote at idx will be printed twice: - // in this call to write_str() and in the next one. - f.write_str(&self.string[start_idx..=idx])?; - start_idx = idx; - } + if peekable_chars.peek().map(|c| *c == quote).unwrap_or(false) { + write!(f, "{char}{char}")?; + peekable_chars.next(); + } else { + write!(f, "{char}{char}")?; } } _ => { + write!(f, "{ch}")?; peekable_chars.next(); } } previous_char = ch; } - f.write_str(&self.string[start_idx..])?; Ok(()) } } @@ -505,7 +325,7 @@ pub fn escape_double_quote_string(s: &str) -> EscapeQuotedString<'_> { pub struct EscapeEscapedStringLiteral<'a>(&'a str); -impl fmt::Display for EscapeEscapedStringLiteral<'_> { +impl<'a> fmt::Display for EscapeEscapedStringLiteral<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for c in self.0.chars() { match c { @@ -539,7 +359,7 @@ pub fn escape_escaped_string(s: &str) -> EscapeEscapedStringLiteral<'_> { pub struct EscapeUnicodeStringLiteral<'a>(&'a str); -impl fmt::Display for EscapeUnicodeStringLiteral<'_> { +impl<'a> fmt::Display for EscapeUnicodeStringLiteral<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for c in self.0.chars() { match c { @@ -550,16 +370,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> { write!(f, r#"\\"#)?; } x if x.is_ascii() => { - write!(f, "{c}")?; + write!(f, "{}", c)?; } _ => { let codepoint = c as u32; // if the character fits in 32 bits, we can use the \XXXX format // otherwise, we need to use the \+XXXXXX format if codepoint <= 0xFFFF { - write!(f, "\\{codepoint:04X}")?; + write!(f, "\\{:04X}", codepoint)?; } else { - write!(f, "\\+{codepoint:06X}")?; + write!(f, "\\+{:06X}", codepoint)?; } } } diff --git a/src/ast/visitor.rs b/src/ast/visitor.rs index 8e0a3139..418e0a29 100644 --- a/src/ast/visitor.rs +++ b/src/ast/visitor.rs @@ -17,7 +17,7 @@ //! Recursive visitors for ast Nodes. See [`Visitor`] for more details. -use crate::ast::{Expr, ObjectName, Query, Statement, TableFactor, Value}; +use crate::ast::{Expr, ObjectName, Query, Statement, TableFactor}; use core::ops::ControlFlow; /// A type that can be visited by a [`Visitor`]. 
See [`Visitor`] for @@ -233,16 +233,6 @@ pub trait Visitor { fn post_visit_statement(&mut self, _statement: &Statement) -> ControlFlow { ControlFlow::Continue(()) } - - /// Invoked for any Value that appear in the AST before visiting children - fn pre_visit_value(&mut self, _value: &Value) -> ControlFlow { - ControlFlow::Continue(()) - } - - /// Invoked for any Value that appear in the AST after visiting children - fn post_visit_value(&mut self, _value: &Value) -> ControlFlow { - ControlFlow::Continue(()) - } } /// A visitor that can be used to mutate an AST tree. @@ -347,16 +337,6 @@ pub trait VisitorMut { fn post_visit_statement(&mut self, _statement: &mut Statement) -> ControlFlow { ControlFlow::Continue(()) } - - /// Invoked for any value that appear in the AST before visiting children - fn pre_visit_value(&mut self, _value: &mut Value) -> ControlFlow { - ControlFlow::Continue(()) - } - - /// Invoked for any statements that appear in the AST after visiting children - fn post_visit_value(&mut self, _value: &mut Value) -> ControlFlow { - ControlFlow::Continue(()) - } } struct RelationVisitor(F); @@ -423,7 +403,7 @@ where /// ``` /// # use sqlparser::parser::Parser; /// # use sqlparser::dialect::GenericDialect; -/// # use sqlparser::ast::{ObjectName, ObjectNamePart, Ident, visit_relations_mut}; +/// # use sqlparser::ast::{ObjectName, visit_relations_mut}; /// # use core::ops::ControlFlow; /// let sql = "SELECT a FROM foo"; /// let mut statements = Parser::parse_sql(&GenericDialect{}, sql) @@ -431,7 +411,7 @@ where /// /// // visit statements, renaming table foo to bar /// visit_relations_mut(&mut statements, |table| { -/// table.0[0] = ObjectNamePart::Identifier(Ident::new("bar")); +/// table.0[0].value = table.0[0].value.replace("foo", "bar"); /// ControlFlow::<()>::Continue(()) /// }); /// @@ -523,7 +503,7 @@ where /// // Remove all select limits in sub-queries /// visit_expressions_mut(&mut statements, |expr| { /// if let Expr::Subquery(q) = expr { -/// q.limit_clause = None; +/// q.limit = None /// } /// ControlFlow::<()>::Continue(()) /// }); @@ -547,10 +527,9 @@ where /// /// visit_expressions_mut(&mut statements, |expr| { /// if matches!(expr, Expr::Identifier(col_name) if col_name.value == "x") { -/// let old_expr = std::mem::replace(expr, Expr::value(Value::Null)); +/// let old_expr = std::mem::replace(expr, Expr::Value(Value::Null)); /// *expr = Expr::Function(Function { -/// name: ObjectName::from(vec![Ident::new("f")]), -/// uses_odbc_syntax: false, +/// name: ObjectName(vec![Ident::new("f")]), /// args: FunctionArguments::List(FunctionArgumentList { /// duplicate_treatment: None, /// args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(old_expr))], @@ -647,7 +626,7 @@ where /// // Remove all select limits in outer statements (not in sub-queries) /// visit_statements_mut(&mut statements, |stmt| { /// if let Statement::Query(q) = stmt { -/// q.limit_clause = None; +/// q.limit = None /// } /// ControlFlow::<()>::Continue(()) /// }); @@ -667,7 +646,6 @@ where #[cfg(test)] mod tests { use super::*; - use crate::ast::Statement; use crate::dialect::GenericDialect; use crate::parser::Parser; use crate::tokenizer::Tokenizer; @@ -741,7 +719,7 @@ mod tests { } } - fn do_visit>(sql: &str, visitor: &mut V) -> Statement { + fn do_visit(sql: &str) -> Vec { let dialect = GenericDialect {}; let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); let s = Parser::new(&dialect) @@ -749,9 +727,9 @@ mod tests { .parse_statement() .unwrap(); - let flow = s.visit(visitor); - 
assert_eq!(flow, ControlFlow::Continue(())); - s + let mut visitor = TestVisitor::default(); + s.visit(&mut visitor); + visitor.visited } #[test] @@ -898,113 +876,12 @@ mod tests { "POST: QUERY: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", "POST: STATEMENT: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", ] - ), - ( - "SHOW COLUMNS FROM t1", - vec![ - "PRE: STATEMENT: SHOW COLUMNS FROM t1", - "PRE: RELATION: t1", - "POST: RELATION: t1", - "POST: STATEMENT: SHOW COLUMNS FROM t1", - ], - ), + ) ]; for (sql, expected) in tests { - let mut visitor = TestVisitor::default(); - let _ = do_visit(sql, &mut visitor); - let actual: Vec<_> = visitor.visited.iter().map(|x| x.as_str()).collect(); + let actual = do_visit(sql); + let actual: Vec<_> = actual.iter().map(|x| x.as_str()).collect(); assert_eq!(actual, expected) } } - - struct QuickVisitor; // [`TestVisitor`] is too slow to iterate over thousands of nodes - - impl Visitor for QuickVisitor { - type Break = (); - } - - #[test] - fn overflow() { - let cond = (0..1000) - .map(|n| format!("X = {n}")) - .collect::>() - .join(" OR "); - let sql = format!("SELECT x where {cond}"); - - let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap(); - let s = Parser::new(&dialect) - .with_tokens(tokens) - .parse_statement() - .unwrap(); - - let mut visitor = QuickVisitor {}; - let flow = s.visit(&mut visitor); - assert_eq!(flow, ControlFlow::Continue(())); - } -} - -#[cfg(test)] -mod visit_mut_tests { - use crate::ast::{Statement, Value, VisitMut, VisitorMut}; - use crate::dialect::GenericDialect; - use crate::parser::Parser; - use crate::tokenizer::Tokenizer; - use core::ops::ControlFlow; - - #[derive(Default)] - struct MutatorVisitor { - index: u64, - } - - impl VisitorMut for MutatorVisitor { - type Break = (); - - fn pre_visit_value(&mut self, value: &mut Value) -> ControlFlow { - self.index += 1; - *value = Value::SingleQuotedString(format!("REDACTED_{}", self.index)); - ControlFlow::Continue(()) - } - - fn post_visit_value(&mut self, _value: &mut Value) -> ControlFlow { - ControlFlow::Continue(()) - } - } - - fn do_visit_mut>(sql: &str, visitor: &mut V) -> Statement { - let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - let mut s = Parser::new(&dialect) - .with_tokens(tokens) - .parse_statement() - .unwrap(); - - let flow = s.visit(visitor); - assert_eq!(flow, ControlFlow::Continue(())); - s - } - - #[test] - fn test_value_redact() { - let tests = vec![ - ( - concat!( - "SELECT * FROM monthly_sales ", - "PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ", - "ORDER BY EMPID" - ), - concat!( - "SELECT * FROM monthly_sales ", - "PIVOT(SUM(a.amount) FOR a.MONTH IN ('REDACTED_1', 'REDACTED_2', 'REDACTED_3', 'REDACTED_4')) AS p (c, d) ", - "ORDER BY EMPID" - ), - ), - ]; - - for (sql, expected) in tests { - let mut visitor = MutatorVisitor::default(); - let mutated = do_visit_mut(sql, &mut visitor); - assert_eq!(mutated.to_string(), expected) - } - } } diff --git a/src/dialect/bigquery.rs b/src/dialect/bigquery.rs index c2cd507c..96633552 100644 --- a/src/dialect/bigquery.rs +++ b/src/dialect/bigquery.rs @@ -15,45 +15,14 @@ // specific language governing permissions and limitations // under the License. 
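Aside: for readers following the visitor changes above, here is a hedged usage sketch of the read-only side of this API, collecting relation names with `visit_relations`. It assumes the crate is built with the `visitor` feature, as the doc examples in this hunk do; the SQL text is illustrative.

```rust
use core::ops::ControlFlow;
use sqlparser::ast::visit_relations;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT a FROM foo JOIN bar ON foo.id = bar.id";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();

    // Collect every relation (table) name the visitor reaches.
    let mut tables = Vec::new();
    visit_relations(&statements, |rel| {
        tables.push(rel.to_string());
        ControlFlow::<()>::Continue(())
    });
    println!("{tables:?}"); // expected to include "foo" and "bar"
}
```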
-use crate::ast::Statement; use crate::dialect::Dialect; -use crate::keywords::Keyword; -use crate::parser::{Parser, ParserError}; - -/// These keywords are disallowed as column identifiers. Such that -/// `SELECT 5 AS FROM T` is rejected by BigQuery. -const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ - Keyword::WITH, - Keyword::SELECT, - Keyword::WHERE, - Keyword::GROUP, - Keyword::HAVING, - Keyword::ORDER, - Keyword::LATERAL, - Keyword::LIMIT, - Keyword::FETCH, - Keyword::UNION, - Keyword::EXCEPT, - Keyword::INTERSECT, - Keyword::FROM, - Keyword::INTO, - Keyword::END, -]; /// A [`Dialect`] for [Google Bigquery](https://cloud.google.com/bigquery/) #[derive(Debug, Default)] pub struct BigQueryDialect; impl Dialect for BigQueryDialect { - fn parse_statement(&self, parser: &mut Parser) -> Option> { - if parser.parse_keyword(Keyword::BEGIN) { - return Some(parser.parse_begin_exception_end()); - } - - None - } - - /// See + // See https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers fn is_delimited_identifier_start(&self, ch: char) -> bool { ch == '`' } @@ -62,16 +31,8 @@ impl Dialect for BigQueryDialect { true } - /// See - fn supports_column_definition_trailing_commas(&self) -> bool { - true - } - fn is_identifier_start(&self, ch: char) -> bool { ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_' - // BigQuery supports `@@foo.bar` variable syntax in its procedural language. - // https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend - || ch == '@' } fn is_identifier_part(&self, ch: char) -> bool { @@ -111,37 +72,4 @@ impl Dialect for BigQueryDialect { fn require_interval_qualifier(&self) -> bool { true } - - // See https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct - fn supports_struct_literal(&self) -> bool { - true - } - - /// See - fn supports_select_expr_star(&self) -> bool { - true - } - - /// See - fn supports_execute_immediate(&self) -> bool { - true - } - - // See - fn supports_timestamp_versioning(&self) -> bool { - true - } - - // See - fn supports_group_by_expr(&self) -> bool { - true - } - - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { - !RESERVED_FOR_COLUMN_ALIAS.contains(kw) - } - - fn supports_pipe_operator(&self) -> bool { - true - } } diff --git a/src/dialect/clickhouse.rs b/src/dialect/clickhouse.rs index f5e70c30..0c8f0804 100644 --- a/src/dialect/clickhouse.rs +++ b/src/dialect/clickhouse.rs @@ -50,48 +50,4 @@ impl Dialect for ClickHouseDialect { fn supports_limit_comma(&self) -> bool { true } - - fn supports_insert_table_function(&self) -> bool { - true - } - - fn supports_insert_format(&self) -> bool { - true - } - - fn supports_numeric_literal_underscores(&self) -> bool { - true - } - - // ClickHouse uses this for some FORMAT expressions in `INSERT` context, e.g. when inserting - // with FORMAT JSONEachRow a raw JSON key-value expression is valid and expected. 
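Aside: as a quick illustration of the BigQuery hunk above, where a backtick starts a delimited identifier, here is a hedged parsing sketch. The table and column names are made up, and the `Result` is printed rather than unwrapped.

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // Backticks delimit identifiers for BigQuery, per is_delimited_identifier_start.
    let sql = "SELECT `user name` FROM `my dataset.my table`";
    let result = Parser::parse_sql(&BigQueryDialect {}, sql);
    println!("{result:?}");
}
```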
- // - // [ClickHouse formats](https://clickhouse.com/docs/en/interfaces/formats) - fn supports_dictionary_syntax(&self) -> bool { - true - } - - /// See - fn supports_lambda_functions(&self) -> bool { - true - } - - fn supports_from_first_select(&self) -> bool { - true - } - - /// See - fn supports_order_by_all(&self) -> bool { - true - } - - // See - fn supports_group_by_expr(&self) -> bool { - true - } - - /// See - fn supports_group_by_with_modifier(&self) -> bool { - true - } } diff --git a/src/dialect/databricks.rs b/src/dialect/databricks.rs index a3476b1b..4924e807 100644 --- a/src/dialect/databricks.rs +++ b/src/dialect/databricks.rs @@ -59,9 +59,4 @@ impl Dialect for DatabricksDialect { fn require_interval_qualifier(&self) -> bool { true } - - // See https://docs.databricks.com/en/sql/language-manual/functions/struct.html - fn supports_struct_literal(&self) -> bool { - true - } } diff --git a/src/dialect/duckdb.rs b/src/dialect/duckdb.rs index fa18463a..e1b8db11 100644 --- a/src/dialect/duckdb.rs +++ b/src/dialect/duckdb.rs @@ -47,10 +47,6 @@ impl Dialect for DuckDbDialect { true } - fn supports_named_fn_args_with_assignment_operator(&self) -> bool { - true - } - // DuckDB uses this syntax for `STRUCT`s. // // https://duckdb.org/docs/sql/data_types/struct.html#creating-structs @@ -65,37 +61,9 @@ impl Dialect for DuckDbDialect { true } - /// See - fn supports_lambda_functions(&self) -> bool { - true - } - // DuckDB is compatible with PostgreSQL syntax for this statement, // although not all features may be implemented. fn supports_explain_with_utility_options(&self) -> bool { true } - - /// See DuckDB - fn supports_load_extension(&self) -> bool { - true - } - - // See DuckDB - fn supports_array_typedef_with_brackets(&self) -> bool { - true - } - - fn supports_from_first_select(&self) -> bool { - true - } - - /// See DuckDB - fn supports_order_by_all(&self) -> bool { - true - } - - fn supports_select_wildcard_exclude(&self) -> bool { - true - } } diff --git a/src/dialect/generic.rs b/src/dialect/generic.rs index be2cc007..0a5464c9 100644 --- a/src/dialect/generic.rs +++ b/src/dialect/generic.rs @@ -48,14 +48,6 @@ impl Dialect for GenericDialect { true } - fn supports_group_by_with_modifier(&self) -> bool { - true - } - - fn supports_left_associative_joins_without_parens(&self) -> bool { - true - } - fn supports_connect_by(&self) -> bool { true } @@ -112,14 +104,6 @@ impl Dialect for GenericDialect { true } - fn supports_from_first_select(&self) -> bool { - true - } - - fn supports_projection_trailing_commas(&self) -> bool { - true - } - fn supports_asc_desc_in_column_definition(&self) -> bool { true } @@ -127,60 +111,4 @@ impl Dialect for GenericDialect { fn supports_try_convert(&self) -> bool { true } - - fn supports_comment_on(&self) -> bool { - true - } - - fn supports_load_extension(&self) -> bool { - true - } - - fn supports_named_fn_args_with_assignment_operator(&self) -> bool { - true - } - - fn supports_struct_literal(&self) -> bool { - true - } - - fn supports_empty_projections(&self) -> bool { - true - } - - fn supports_nested_comments(&self) -> bool { - true - } - - fn supports_user_host_grantee(&self) -> bool { - true - } - - fn supports_string_escape_constant(&self) -> bool { - true - } - - fn supports_array_typedef_with_brackets(&self) -> bool { - true - } - - fn supports_match_against(&self) -> bool { - true - } - - fn supports_set_names(&self) -> bool { - true - } - - fn supports_comma_separated_set_assignments(&self) -> bool { - true - } - - fn 
supports_filter_during_aggregation(&self) -> bool { - true - } - - fn supports_select_wildcard_exclude(&self) -> bool { - true - } } diff --git a/src/dialect/hive.rs b/src/dialect/hive.rs index 3e15d395..63642b33 100644 --- a/src/dialect/hive.rs +++ b/src/dialect/hive.rs @@ -51,24 +51,4 @@ impl Dialect for HiveDialect { fn require_interval_qualifier(&self) -> bool { true } - - /// See - fn supports_bang_not_operator(&self) -> bool { - true - } - - /// See - fn supports_load_data(&self) -> bool { - true - } - - /// See - fn supports_table_sample_before_alias(&self) -> bool { - true - } - - /// See - fn supports_group_by_with_modifier(&self) -> bool { - true - } } diff --git a/src/dialect/mod.rs b/src/dialect/mod.rs index deb5719d..453fee3d 100644 --- a/src/dialect/mod.rs +++ b/src/dialect/mod.rs @@ -49,7 +49,7 @@ pub use self::postgresql::PostgreSqlDialect; pub use self::redshift::RedshiftSqlDialect; pub use self::snowflake::SnowflakeDialect; pub use self::sqlite::SQLiteDialect; -use crate::ast::{ColumnOption, Expr, GranteesType, Ident, ObjectNamePart, Statement}; +use crate::ast::{ColumnOption, Expr, Statement}; pub use crate::keywords; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; @@ -75,15 +75,6 @@ macro_rules! dialect_of { }; } -// Similar to above, but for applying directly against an instance of dialect -// instead of a struct member named dialect. This avoids lifetime issues when -// mixing match guards and token references. -macro_rules! dialect_is { - ($dialect:ident is $($dialect_type:ty)|+) => { - ($($dialect.is::<$dialect_type>())||+) - } -} - /// Encapsulates the differences between SQL implementations. /// /// # SQL Dialects @@ -137,41 +128,16 @@ pub trait Dialect: Debug + Any { ch == '"' || ch == '`' } - /// Determine if a character starts a potential nested quoted identifier. - /// Example: RedShift supports the following quote styles to all mean the same thing: - /// ```sql - /// SELECT 1 AS foo; - /// SELECT 1 AS "foo"; - /// SELECT 1 AS [foo]; - /// SELECT 1 AS ["foo"]; - /// ``` - fn is_nested_delimited_identifier_start(&self, _ch: char) -> bool { - false - } - - /// Only applicable whenever [`Self::is_nested_delimited_identifier_start`] returns true - /// If the next sequence of tokens potentially represent a nested identifier, then this method - /// returns a tuple containing the outer quote style, and if present, the inner (nested) quote style. - /// - /// Example (Redshift): - /// ```text - /// `["foo"]` => Some(`[`, Some(`"`)) - /// `[foo]` => Some(`[`, None) - /// `[0]` => None - /// `"foo"` => None - /// ``` - fn peek_nested_delimited_identifier_quotes( - &self, - mut _chars: Peekable>, - ) -> Option<(char, Option)> { - None - } - /// Return the character used to quote identifiers. fn identifier_quote_style(&self, _identifier: &str) -> Option { None } + /// Determine if quoted characters are proper for identifier + fn is_proper_identifier_inside_quotes(&self, mut _chars: Peekable>) -> bool { + true + } + /// Determine if a character is a valid start character for an unquoted identifier fn is_identifier_start(&self, ch: char) -> bool; @@ -201,33 +167,6 @@ pub trait Dialect: Debug + Any { false } - /// Determine whether the dialect strips the backslash when escaping LIKE wildcards (%, _). - /// - /// [MySQL] has a special case when escaping single quoted strings which leaves these unescaped - /// so they can be used in LIKE patterns without double-escaping (as is necessary in other - /// escaping dialects, such as [Snowflake]). 
Generally, special characters have escaping rules - /// causing them to be replaced with a different byte sequences (e.g. `'\0'` becoming the zero - /// byte), and the default if an escaped character does not have a specific escaping rule is to - /// strip the backslash (e.g. there is no rule for `h`, so `'\h' = 'h'`). MySQL's special case - /// for ignoring LIKE wildcard escapes is to *not* strip the backslash, so that `'\%' = '\\%'`. - /// This applies to all string literals though, not just those used in LIKE patterns. - /// - /// ```text - /// mysql> select '\_', hex('\\'), hex('_'), hex('\_'); - /// +----+-----------+----------+-----------+ - /// | \_ | hex('\\') | hex('_') | hex('\_') | - /// +----+-----------+----------+-----------+ - /// | \_ | 5C | 5F | 5C5F | - /// +----+-----------+----------+-----------+ - /// 1 row in set (0.00 sec) - /// ``` - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/string-literals.html - /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/functions/like#usage-notes - fn ignores_wildcard_escapes(&self) -> bool { - false - } - /// Determine if the dialect supports string literals with `U&` prefix. /// This is used to specify Unicode code points in string literals. /// For example, in PostgreSQL, the following is a valid string literal: @@ -272,55 +211,11 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialects supports `GROUP BY` modifiers prefixed by a `WITH` keyword. - /// Example: `GROUP BY value WITH ROLLUP`. - fn supports_group_by_with_modifier(&self) -> bool { - false - } - - /// Indicates whether the dialect supports left-associative join parsing - /// by default when parentheses are omitted in nested joins. - /// - /// Most dialects (like MySQL or Postgres) assume **left-associative** precedence, - /// so a query like: - /// - /// ```sql - /// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ... - /// ``` - /// is interpreted as: - /// ```sql - /// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...) - /// ``` - /// and internally represented as a **flat list** of joins. - /// - /// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative** - /// precedence and interpret the same query as: - /// ```sql - /// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...)) - /// ``` - /// which results in a **nested join** structure in the AST. - /// - /// If this method returns `false`, the parser must build nested join trees - /// even in the absence of parentheses to reflect the correct associativity - fn supports_left_associative_joins_without_parens(&self) -> bool { - true - } - - /// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN. - fn supports_outer_join_operator(&self) -> bool { - false - } - /// Returns true if the dialect supports CONNECT BY. fn supports_connect_by(&self) -> bool { false } - /// Returns true if the dialect supports `EXECUTE IMMEDIATE` statements. - fn supports_execute_immediate(&self) -> bool { - false - } - /// Returns true if the dialect supports the MATCH_RECOGNIZE operation. 
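Aside: `is_identifier_start` and `is_identifier_part` are the only methods of this trait without default bodies, so a downstream crate can define a dialect by implementing just those two and inheriting every capability flag above. A minimal sketch, with a hypothetical `MyDialect` name:

```rust
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;

/// Hypothetical dialect: only the two required identifier checks are
/// implemented; every other Dialect method keeps its default.
#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }
}

fn main() {
    let result = Parser::parse_sql(&MyDialect, "SELECT a, b FROM t WHERE a > 1");
    println!("{result:?}");
}
```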
fn supports_match_recognize(&self) -> bool { false @@ -331,55 +226,22 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialect supports `BEGIN {DEFERRED | IMMEDIATE | EXCLUSIVE | TRY | CATCH} [TRANSACTION]` statements + /// Returns true if the dialect supports `BEGIN {DEFERRED | IMMEDIATE | EXCLUSIVE} [TRANSACTION]` statements fn supports_start_transaction_modifier(&self) -> bool { false } - /// Returns true if the dialect supports `END {TRY | CATCH}` statements - fn supports_end_transaction_modifier(&self) -> bool { - false - } - - /// Returns true if the dialect supports named arguments of the form `FUN(a = '1', b = '2')`. + /// Returns true if the dialect supports named arguments of the form FUN(a = '1', b = '2'). fn supports_named_fn_args_with_eq_operator(&self) -> bool { false } - /// Returns true if the dialect supports named arguments of the form `FUN(a : '1', b : '2')`. - fn supports_named_fn_args_with_colon_operator(&self) -> bool { - false - } - - /// Returns true if the dialect supports named arguments of the form `FUN(a := '1', b := '2')`. - fn supports_named_fn_args_with_assignment_operator(&self) -> bool { - false - } - - /// Returns true if the dialect supports named arguments of the form `FUN(a => '1', b => '2')`. - fn supports_named_fn_args_with_rarrow_operator(&self) -> bool { - true - } - - /// Returns true if dialect supports argument name as arbitrary expression. - /// e.g. `FUN(LOWER('a'):'1', b:'2')` - /// Such function arguments are represented in the AST by the `FunctionArg::ExprNamed` variant, - /// otherwise use the `FunctionArg::Named` variant (compatible reason). - fn supports_named_fn_args_with_expr_name(&self) -> bool { - false - } - /// Returns true if the dialect supports identifiers starting with a numeric /// prefix such as tables named `59901_user_login` fn supports_numeric_prefix(&self) -> bool { false } - /// Returns true if the dialect supports numbers containing underscores, e.g. `10_000_000` - fn supports_numeric_literal_underscores(&self) -> bool { - false - } - /// Returns true if the dialects supports specifying null treatment /// as part of a window function's parameter list as opposed /// to after the parameter list. @@ -427,16 +289,6 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialect supports multiple `SET` statements - /// in a single statement. - /// - /// ```sql - /// SET variable = expression [, variable = expression]; - /// ``` - fn supports_comma_separated_set_assignments(&self) -> bool { - false - } - /// Returns true if the dialect supports an `EXCEPT` clause following a /// wildcard in a select list. /// @@ -481,115 +333,6 @@ pub trait Dialect: Debug + Any { self.supports_trailing_commas() } - /// Returns true if the dialect supports trailing commas in the `FROM` clause of a `SELECT` statement. - /// Example: `SELECT 1 FROM T, U, LIMIT 1` - fn supports_from_trailing_commas(&self) -> bool { - false - } - - /// Returns true if the dialect supports trailing commas in the - /// column definitions list of a `CREATE` statement. 
- /// Example: `CREATE TABLE T (x INT, y TEXT,)` - fn supports_column_definition_trailing_commas(&self) -> bool { - false - } - - /// Returns true if the dialect supports double dot notation for object names - /// - /// Example - /// ```sql - /// SELECT * FROM db_name..table_name - /// ``` - fn supports_object_name_double_dot_notation(&self) -> bool { - false - } - - /// Return true if the dialect supports the STRUCT literal - /// - /// Example - /// ```sql - /// SELECT STRUCT(1 as one, 'foo' as foo, false) - /// ``` - fn supports_struct_literal(&self) -> bool { - false - } - - /// Return true if the dialect supports empty projections in SELECT statements - /// - /// Example - /// ```sql - /// SELECT from table_name - /// ``` - fn supports_empty_projections(&self) -> bool { - false - } - - /// Return true if the dialect supports wildcard expansion on - /// arbitrary expressions in projections. - /// - /// Example: - /// ```sql - /// SELECT STRUCT('foo').* FROM T - /// ``` - fn supports_select_expr_star(&self) -> bool { - false - } - - /// Return true if the dialect supports "FROM-first" selects. - /// - /// Example: - /// ```sql - /// FROM table - /// SELECT * - /// ``` - fn supports_from_first_select(&self) -> bool { - false - } - - /// Return true if the dialect supports pipe operator. - /// - /// Example: - /// ```sql - /// SELECT * - /// FROM table - /// |> limit 1 - /// ``` - /// - /// See - fn supports_pipe_operator(&self) -> bool { - false - } - - /// Does the dialect support MySQL-style `'user'@'host'` grantee syntax? - fn supports_user_host_grantee(&self) -> bool { - false - } - - /// Does the dialect support the `MATCH() AGAINST()` syntax? - fn supports_match_against(&self) -> bool { - false - } - - /// Returns true if the dialect supports an exclude option - /// following a wildcard in the projection section. For example: - /// `SELECT * EXCLUDE col1 FROM tbl`. - /// - /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html) - /// [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/select) - fn supports_select_wildcard_exclude(&self) -> bool { - false - } - - /// Returns true if the dialect supports an exclude option - /// as the last item in the projection section, not necessarily - /// after a wildcard. For example: - /// `SELECT *, c1, c2 EXCLUDE c3 FROM tbl` - /// - /// [Redshift](https://docs.aws.amazon.com/redshift/latest/dg/r_EXCLUDE_list.html) - fn supports_select_exclude(&self) -> bool { - false - } - /// Dialect-specific infix parser override /// /// This method is called to parse the next infix expression. 
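Aside: to opt into one of these capabilities, a custom dialect overrides the corresponding flag. A hedged sketch using `supports_named_fn_args_with_eq_operator` from a little earlier in this hunk; the dialect name and SQL are illustrative, and the `Result` is printed rather than unwrapped.

```rust
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;

/// Hypothetical dialect that accepts `FUN(a = '1', b = '2')` style named arguments.
#[derive(Debug)]
struct NamedArgsDialect;

impl Dialect for NamedArgsDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }
    fn supports_named_fn_args_with_eq_operator(&self) -> bool {
        true
    }
}

fn main() {
    // With the flag enabled, `a = '1'` is treated as a named argument
    // rather than an equality expression.
    let result = Parser::parse_sql(&NamedArgsDialect, "SELECT my_func(a = '1', b = '2')");
    println!("{result:?}");
}
```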
@@ -635,7 +378,7 @@ pub trait Dialect: Debug + Any { } let token = parser.peek_token(); - debug!("get_next_precedence_full() {token:?}"); + debug!("get_next_precedence_full() {:?}", token); match token.token { Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)), Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)), @@ -667,27 +410,20 @@ pub trait Dialect: Debug + Any { Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), _ => Ok(self.prec_unknown()), }, Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)), Token::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), - Token::Word(w) if w.keyword == Keyword::OVERLAPS => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::LIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), - Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)), - Token::Period => Ok(p!(Period)), - Token::Assignment - | Token::Eq + Token::Eq | Token::Lt | Token::LtEq | Token::Neq @@ -703,34 +439,18 @@ pub trait Dialect: Debug + Any { | Token::ExclamationMarkDoubleTilde | Token::ExclamationMarkDoubleTildeAsterisk | Token::Spaceship => Ok(p!(Eq)), - Token::Pipe - | Token::QuestionMarkDash - | Token::DoubleSharp - | Token::Overlap - | Token::AmpersandLeftAngleBracket - | Token::AmpersandRightAngleBracket - | Token::QuestionMarkDashVerticalBar - | Token::AmpersandLeftAngleBracketVerticalBar - | Token::VerticalBarAmpersandRightAngleBracket - | Token::TwoWayArrow - | Token::LeftAngleBracketCaret - | Token::RightAngleBracketCaret - | Token::QuestionMarkSharp - | Token::QuestionMarkDoubleVerticalBar - | Token::QuestionPipe - | Token::TildeEqual - | Token::AtSign - | Token::ShiftLeftVerticalBar - | Token::VerticalBarShiftRight => Ok(p!(Pipe)), + Token::Pipe => Ok(p!(Pipe)), Token::Caret | Token::Sharp | Token::ShiftRight | Token::ShiftLeft => Ok(p!(Caret)), Token::Ampersand => Ok(p!(Ampersand)), Token::Plus | Token::Minus => Ok(p!(PlusMinus)), Token::Mul | Token::Div | Token::DuckIntDiv | Token::Mod | Token::StringConcat => { Ok(p!(MulDivModOp)) } - Token::DoubleColon | Token::ExclamationMark | Token::LBracket | Token::CaretAt => { - Ok(p!(DoubleColon)) - } + Token::DoubleColon + | Token::ExclamationMark + | Token::LBracket + | Token::Overlap + | Token::CaretAt => Ok(p!(DoubleColon)), Token::Arrow | Token::LongArrow | Token::HashArrow @@ -742,6 +462,7 @@ pub trait Dialect: Debug + Any { | Token::AtAt | Token::Question | Token::QuestionAnd + | Token::QuestionPipe | Token::CustomBinaryOperator(_) => Ok(p!(PgOther)), _ => Ok(self.prec_unknown()), } @@ -775,7 +496,6 @@ pub trait Dialect: Debug + Any { /// Uses (APPROXIMATELY) as a reference fn prec_value(&self, prec: Precedence) -> u8 { match prec { - Precedence::Period => 100, 
Precedence::DoubleColon => 50, Precedence::AtTz => 41, Precedence::MulDivModOp => 40, @@ -820,12 +540,6 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if this dialect allows dollar placeholders - /// e.g. `SELECT $var` (SQLite) - fn supports_dollar_placeholder(&self) -> bool { - false - } - /// Does the dialect support with clause in create index statement? /// e.g. `CREATE INDEX idx ON t WITH (key = value, key2)` fn supports_create_index_with_clause(&self) -> bool { @@ -861,17 +575,6 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialect supports `a!` expressions - fn supports_factorial_operator(&self) -> bool { - false - } - - /// Returns true if the dialect supports nested comments - /// e.g. `/* /* nested */ */` - fn supports_nested_comments(&self) -> bool { - false - } - /// Returns true if this dialect supports treating the equals operator `=` within a `SelectItem` /// as an alias assignment operator, rather than a boolean expression. /// For example: the following statements are equivalent for such a dialect: @@ -888,233 +591,21 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialect supports `!a` syntax for boolean `NOT` expressions. - fn supports_bang_not_operator(&self) -> bool { + /// Returns true if the dialect supports the `LISTEN` statement + fn supports_listen(&self) -> bool { false } - /// Returns true if the dialect supports the `LISTEN`, `UNLISTEN` and `NOTIFY` statements - fn supports_listen_notify(&self) -> bool { + /// Returns true if the dialect supports the `NOTIFY` statement + fn supports_notify(&self) -> bool { false } - /// Returns true if the dialect supports the `LOAD DATA` statement - fn supports_load_data(&self) -> bool { - false - } - - /// Returns true if the dialect supports the `LOAD extension` statement - fn supports_load_extension(&self) -> bool { - false - } - - /// Returns true if this dialect expects the `TOP` option + /// Returns true if this dialect expects the the `TOP` option /// before the `ALL`/`DISTINCT` options in a `SELECT` statement. fn supports_top_before_distinct(&self) -> bool { false } - - /// Returns true if the dialect supports boolean literals (`true` and `false`). - /// For example, in MSSQL these are treated as identifiers rather than boolean literals. - fn supports_boolean_literals(&self) -> bool { - true - } - - /// Returns true if this dialect supports the `LIKE 'pattern'` option in - /// a `SHOW` statement before the `IN` option - fn supports_show_like_before_in(&self) -> bool { - false - } - - /// Returns true if this dialect supports the `COMMENT` statement - fn supports_comment_on(&self) -> bool { - false - } - - /// Returns true if the dialect supports the `CREATE TABLE SELECT` statement - fn supports_create_table_select(&self) -> bool { - false - } - - /// Returns true if the dialect supports PartiQL for querying semi-structured data - /// - fn supports_partiql(&self) -> bool { - false - } - - /// Returns true if the specified keyword is reserved and cannot be - /// used as an identifier without special handling like quoting. - fn is_reserved_for_identifier(&self, kw: Keyword) -> bool { - keywords::RESERVED_FOR_IDENTIFIER.contains(&kw) - } - - /// Returns reserved keywords when looking to parse a `TableFactor`. - /// See [Self::supports_from_trailing_commas] - fn get_reserved_keywords_for_table_factor(&self) -> &[Keyword] { - keywords::RESERVED_FOR_TABLE_FACTOR - } - - /// Returns reserved keywords that may prefix a select item expression - /// e.g. 
`SELECT CONNECT_BY_ROOT name FROM Tbl2` (Snowflake) - fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] { - &[] - } - - /// Returns grantee types that should be treated as identifiers - fn get_reserved_grantees_types(&self) -> &[GranteesType] { - &[] - } - - /// Returns true if this dialect supports the `TABLESAMPLE` option - /// before the table alias option. For example: - /// - /// Table sample before alias: `SELECT * FROM tbl AS t TABLESAMPLE (10)` - /// Table sample after alias: `SELECT * FROM tbl TABLESAMPLE (10) AS t` - /// - /// - fn supports_table_sample_before_alias(&self) -> bool { - false - } - - /// Returns true if this dialect supports the `INSERT INTO ... SET col1 = 1, ...` syntax. - /// - /// MySQL: - fn supports_insert_set(&self) -> bool { - false - } - - /// Does the dialect support table function in insertion? - fn supports_insert_table_function(&self) -> bool { - false - } - - /// Does the dialect support insert formats, e.g. `INSERT INTO ... FORMAT ` - fn supports_insert_format(&self) -> bool { - false - } - - /// Returns true if this dialect supports `SET` statements without an explicit - /// assignment operator such as `=`. For example: `SET SHOWPLAN_XML ON`. - fn supports_set_stmt_without_operator(&self) -> bool { - false - } - - /// Returns true if the specified keyword should be parsed as a column identifier. - /// See [keywords::RESERVED_FOR_COLUMN_ALIAS] - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { - !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) - } - - /// Returns true if the specified keyword should be parsed as a select item alias. - /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided - /// to enable looking ahead if needed. - fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { - explicit || self.is_column_alias(kw, parser) - } - - /// Returns true if the specified keyword should be parsed as a table identifier. - /// See [keywords::RESERVED_FOR_TABLE_ALIAS] - fn is_table_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { - !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) - } - - /// Returns true if the specified keyword should be parsed as a table factor alias. - /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided - /// to enable looking ahead if needed. - fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { - explicit || self.is_table_alias(kw, parser) - } - - /// Returns true if this dialect supports querying historical table data - /// by specifying which version of the data to query. - fn supports_timestamp_versioning(&self) -> bool { - false - } - - /// Returns true if this dialect supports the E'...' syntax for string literals - /// - /// Postgres: - fn supports_string_escape_constant(&self) -> bool { - false - } - - /// Returns true if the dialect supports the table hints in the `FROM` clause. - fn supports_table_hints(&self) -> bool { - false - } - - /// Returns true if this dialect requires a whitespace character after `--` to start a single line comment. - /// - /// MySQL: - /// e.g. UPDATE account SET balance=balance--1 - // WHERE account_id=5752 ^^^ will be interpreted as two minus signs instead of a comment - fn requires_single_line_comment_whitespace(&self) -> bool { - false - } - - /// Returns true if the dialect supports array type definition with brackets with - /// an optional size. 
For example: - /// ```CREATE TABLE my_table (arr1 INT[], arr2 INT[3])``` - /// ```SELECT x::INT[]``` - fn supports_array_typedef_with_brackets(&self) -> bool { - false - } - /// Returns true if the dialect supports geometric types. - /// - /// Postgres: - /// e.g. @@ circle '((0,0),10)' - fn supports_geometric_types(&self) -> bool { - false - } - - /// Returns true if the dialect supports `ORDER BY ALL`. - /// `ALL` which means all columns of the SELECT clause. - /// - /// For example: ```SELECT * FROM addresses ORDER BY ALL;```. - fn supports_order_by_all(&self) -> bool { - false - } - - /// Returns true if the dialect supports `SET NAMES [COLLATE ]`. - /// - /// - [MySQL](https://dev.mysql.com/doc/refman/8.4/en/set-names.html) - /// - [PostgreSQL](https://www.postgresql.org/docs/17/sql-set.html) - /// - /// Note: Postgres doesn't support the `COLLATE` clause, but we permissively parse it anyway. - fn supports_set_names(&self) -> bool { - false - } - - fn supports_space_separated_column_options(&self) -> bool { - false - } - - /// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement. - /// Example: - /// ```sql - /// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE USING ` - /// ``` - fn supports_alter_column_type_using(&self) -> bool { - false - } - - /// Returns true if the dialect supports `ALTER TABLE tbl DROP COLUMN c1, ..., cn` - fn supports_comma_separated_drop_column_list(&self) -> bool { - false - } - - /// Returns true if the dialect considers the specified ident as a function - /// that returns an identifier. Typically used to generate identifiers - /// programmatically. - /// - /// - [Snowflake](https://docs.snowflake.com/en/sql-reference/identifier-literal) - fn is_identifier_generating_function_name( - &self, - _ident: &Ident, - _name_parts: &[ObjectNamePart], - ) -> bool { - false - } } /// This represents the operators for which precedence must be defined @@ -1122,7 +613,6 @@ pub trait Dialect: Debug + Any { /// higher number -> higher precedence #[derive(Debug, Clone, Copy)] pub enum Precedence { - Period, DoubleColon, AtTz, MulDivModOp, @@ -1267,17 +757,6 @@ mod tests { self.0.is_delimited_identifier_start(ch) } - fn is_nested_delimited_identifier_start(&self, ch: char) -> bool { - self.0.is_nested_delimited_identifier_start(ch) - } - - fn peek_nested_delimited_identifier_quotes( - &self, - chars: std::iter::Peekable>, - ) -> Option<(char, Option)> { - self.0.peek_nested_delimited_identifier_quotes(chars) - } - fn identifier_quote_style(&self, identifier: &str) -> Option { self.0.identifier_quote_style(identifier) } @@ -1286,6 +765,13 @@ mod tests { self.0.supports_string_literal_backslash_escape() } + fn is_proper_identifier_inside_quotes( + &self, + chars: std::iter::Peekable>, + ) -> bool { + self.0.is_proper_identifier_inside_quotes(chars) + } + fn supports_filter_during_aggregation(&self) -> bool { self.0.supports_filter_during_aggregation() } diff --git a/src/dialect/mssql.rs b/src/dialect/mssql.rs index 36bd222b..a5ee0bf7 100644 --- a/src/dialect/mssql.rs +++ b/src/dialect/mssql.rs @@ -15,19 +15,7 @@ // specific language governing permissions and limitations // under the License. 
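Aside: the practical effect of the `Precedence` table earlier in this hunk is the usual operator binding, where `*` binds tighter than `+`, which in turn binds tighter than comparisons and `AND`/`OR`. A small sketch that parses such an expression and prints the round-tripped statement, which should come back as the same text:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // MulDivModOp binds tighter than PlusMinus, which binds tighter than Eq/And/Or.
    let sql = "SELECT * FROM t WHERE a = 1 + 2 * 3 OR b";
    let ast = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    println!("{}", ast[0]);
}
```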
-use crate::ast::helpers::attached_token::AttachedToken; -use crate::ast::{ - BeginEndStatements, ConditionalStatementBlock, ConditionalStatements, GranteesType, - IfStatement, Statement, TriggerObject, -}; use crate::dialect::Dialect; -use crate::keywords::{self, Keyword}; -use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Token; -#[cfg(not(feature = "std"))] -use alloc::{vec, vec::Vec}; - -const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[Keyword::IF, Keyword::ELSE]; /// A [`Dialect`] for [Microsoft SQL Server](https://www.microsoft.com/en-us/sql-server/) #[derive(Debug)] @@ -52,20 +40,12 @@ impl Dialect for MsSqlDialect { || ch == '_' } - fn identifier_quote_style(&self, _identifier: &str) -> Option { - Some('[') - } - /// SQL Server has `CONVERT(type, value)` instead of `CONVERT(value, type)` /// fn convert_type_before_value(&self) -> bool { true } - fn supports_outer_join_operator(&self) -> bool { - true - } - fn supports_connect_by(&self) -> bool { true } @@ -77,222 +57,4 @@ impl Dialect for MsSqlDialect { fn supports_try_convert(&self) -> bool { true } - - /// In MSSQL, there is no boolean type, and `true` and `false` are valid column names - fn supports_boolean_literals(&self) -> bool { - false - } - - fn supports_named_fn_args_with_colon_operator(&self) -> bool { - true - } - - fn supports_named_fn_args_with_expr_name(&self) -> bool { - true - } - - fn supports_named_fn_args_with_rarrow_operator(&self) -> bool { - false - } - - fn supports_start_transaction_modifier(&self) -> bool { - true - } - - fn supports_end_transaction_modifier(&self) -> bool { - true - } - - /// See: - fn supports_set_stmt_without_operator(&self) -> bool { - true - } - - /// See: - fn supports_timestamp_versioning(&self) -> bool { - true - } - - /// See - fn supports_nested_comments(&self) -> bool { - true - } - - /// See - fn supports_object_name_double_dot_notation(&self) -> bool { - true - } - - /// See - fn get_reserved_grantees_types(&self) -> &[GranteesType] { - &[GranteesType::Public] - } - - fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { - !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw) - } - - fn parse_statement(&self, parser: &mut Parser) -> Option> { - if parser.peek_keyword(Keyword::IF) { - Some(self.parse_if_stmt(parser)) - } else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) { - Some(self.parse_create_trigger(parser, false)) - } else if parser.parse_keywords(&[ - Keyword::CREATE, - Keyword::OR, - Keyword::ALTER, - Keyword::TRIGGER, - ]) { - Some(self.parse_create_trigger(parser, true)) - } else { - None - } - } -} - -impl MsSqlDialect { - /// ```sql - /// IF boolean_expression - /// { sql_statement | statement_block } - /// [ ELSE - /// { sql_statement | statement_block } ] - /// ``` - fn parse_if_stmt(&self, parser: &mut Parser) -> Result { - let if_token = parser.expect_keyword(Keyword::IF)?; - - let condition = parser.parse_expr()?; - - let if_block = if parser.peek_keyword(Keyword::BEGIN) { - let begin_token = parser.expect_keyword(Keyword::BEGIN)?; - let statements = self.parse_statement_list(parser, Some(Keyword::END))?; - let end_token = parser.expect_keyword(Keyword::END)?; - ConditionalStatementBlock { - start_token: AttachedToken(if_token), - condition: Some(condition), - then_token: None, - conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), - statements, - end_token: AttachedToken(end_token), - }), - } - } else { - 
let stmt = parser.parse_statement()?; - ConditionalStatementBlock { - start_token: AttachedToken(if_token), - condition: Some(condition), - then_token: None, - conditional_statements: ConditionalStatements::Sequence { - statements: vec![stmt], - }, - } - }; - - let mut prior_statement_ended_with_semi_colon = false; - while let Token::SemiColon = parser.peek_token_ref().token { - parser.advance_token(); - prior_statement_ended_with_semi_colon = true; - } - - let mut else_block = None; - if parser.peek_keyword(Keyword::ELSE) { - let else_token = parser.expect_keyword(Keyword::ELSE)?; - if parser.peek_keyword(Keyword::BEGIN) { - let begin_token = parser.expect_keyword(Keyword::BEGIN)?; - let statements = self.parse_statement_list(parser, Some(Keyword::END))?; - let end_token = parser.expect_keyword(Keyword::END)?; - else_block = Some(ConditionalStatementBlock { - start_token: AttachedToken(else_token), - condition: None, - then_token: None, - conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), - statements, - end_token: AttachedToken(end_token), - }), - }); - } else { - let stmt = parser.parse_statement()?; - else_block = Some(ConditionalStatementBlock { - start_token: AttachedToken(else_token), - condition: None, - then_token: None, - conditional_statements: ConditionalStatements::Sequence { - statements: vec![stmt], - }, - }); - } - } else if prior_statement_ended_with_semi_colon { - parser.prev_token(); - } - - Ok(Statement::If(IfStatement { - if_block, - else_block, - elseif_blocks: Vec::new(), - end_token: None, - })) - } - - /// Parse `CREATE TRIGGER` for [MsSql] - /// - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql - fn parse_create_trigger( - &self, - parser: &mut Parser, - or_alter: bool, - ) -> Result { - let name = parser.parse_object_name(false)?; - parser.expect_keyword_is(Keyword::ON)?; - let table_name = parser.parse_object_name(false)?; - let period = parser.parse_trigger_period()?; - let events = parser.parse_comma_separated(Parser::parse_trigger_event)?; - - parser.expect_keyword_is(Keyword::AS)?; - let statements = Some(parser.parse_conditional_statements(&[Keyword::END])?); - - Ok(Statement::CreateTrigger { - or_alter, - or_replace: false, - is_constraint: false, - name, - period, - events, - table_name, - referenced_table_name: None, - referencing: Vec::new(), - trigger_object: TriggerObject::Statement, - include_each: false, - condition: None, - exec_body: None, - statements, - characteristics: None, - }) - } - - /// Parse a sequence of statements, optionally separated by semicolon. - /// - /// Stops parsing when reaching EOF or the given keyword. 
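Aside: a usage note for the MsSql hunk above, covering square-bracket identifiers and the `CONVERT(type, value)` argument order from `convert_type_before_value`. The table and column names are made up, and the `Result` is printed rather than unwrapped.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // SQL Server style: CONVERT takes the target type first, and identifiers
    // may be delimited with square brackets.
    let sql = "SELECT CONVERT(INT, [Order Count]) FROM [dbo].[Orders]";
    let result = Parser::parse_sql(&MsSqlDialect {}, sql);
    println!("{result:?}");
}
```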
- fn parse_statement_list( - &self, - parser: &mut Parser, - terminal_keyword: Option, - ) -> Result, ParserError> { - let mut stmts = Vec::new(); - loop { - if let Token::EOF = parser.peek_token_ref().token { - break; - } - if let Some(term) = terminal_keyword { - if parser.peek_keyword(term) { - break; - } - } - stmts.push(parser.parse_statement()?); - while let Token::SemiColon = parser.peek_token_ref().token { - parser.advance_token(); - } - } - Ok(stmts) - } } diff --git a/src/dialect/mysql.rs b/src/dialect/mysql.rs index f69e4243..d1bf3334 100644 --- a/src/dialect/mysql.rs +++ b/src/dialect/mysql.rs @@ -25,15 +25,6 @@ use crate::{ parser::{Parser, ParserError}, }; -use super::keywords; - -const RESERVED_FOR_TABLE_ALIAS_MYSQL: &[Keyword] = &[ - Keyword::USE, - Keyword::IGNORE, - Keyword::FORCE, - Keyword::STRAIGHT_JOIN, -]; - /// A [`Dialect`] for [MySQL](https://www.mysql.com/) #[derive(Debug)] pub struct MySqlDialect {} @@ -67,10 +58,6 @@ impl Dialect for MySqlDialect { true } - fn ignores_wildcard_escapes(&self) -> bool { - true - } - fn supports_numeric_prefix(&self) -> bool { true } @@ -110,46 +97,6 @@ impl Dialect for MySqlDialect { fn supports_limit_comma(&self) -> bool { true } - - /// See: - fn supports_create_table_select(&self) -> bool { - true - } - - /// See: - fn supports_insert_set(&self) -> bool { - true - } - - fn supports_user_host_grantee(&self) -> bool { - true - } - - fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { - explicit - || (!keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) - && !RESERVED_FOR_TABLE_ALIAS_MYSQL.contains(kw)) - } - - fn supports_table_hints(&self) -> bool { - true - } - - fn requires_single_line_comment_whitespace(&self) -> bool { - true - } - - fn supports_match_against(&self) -> bool { - true - } - - fn supports_set_names(&self) -> bool { - true - } - - fn supports_comma_separated_set_assignments(&self) -> bool { - true - } } /// `LOCK TABLES` @@ -161,7 +108,7 @@ fn parse_lock_tables(parser: &mut Parser) -> Result { // tbl_name [[AS] alias] lock_type fn parse_lock_table(parser: &mut Parser) -> Result { - let table = parser.parse_identifier()?; + let table = parser.parse_identifier(false)?; let alias = parser.parse_optional_alias(&[Keyword::READ, Keyword::WRITE, Keyword::LOW_PRIORITY])?; let lock_type = parse_lock_tables_type(parser)?; diff --git a/src/dialect/postgresql.rs b/src/dialect/postgresql.rs index b2d4014c..c40c826c 100644 --- a/src/dialect/postgresql.rs +++ b/src/dialect/postgresql.rs @@ -28,6 +28,7 @@ // limitations under the License. 
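Aside: the `LOCK TABLES` handling shown above is reached through the ordinary parsing entry point. A short sketch with illustrative table names:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Handled by the MySQL-specific LOCK TABLES parser shown in this hunk.
    let sql = "LOCK TABLES trans READ, customer WRITE";
    let result = Parser::parse_sql(&MySqlDialect {}, sql);
    println!("{result:?}");
}
```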
use log::debug; +use crate::ast::{CommentObject, ObjectName, Statement, UserDefinedTypeRepresentation}; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; @@ -37,7 +38,6 @@ use crate::tokenizer::Token; #[derive(Debug)] pub struct PostgreSqlDialect {} -const PERIOD_PREC: u8 = 200; const DOUBLE_COLON_PREC: u8 = 140; const BRACKET_PREC: u8 = 130; const COLLATE_PREC: u8 = 120; @@ -104,7 +104,7 @@ impl Dialect for PostgreSqlDialect { fn get_next_precedence(&self, parser: &Parser) -> Option> { let token = parser.peek_token(); - debug!("get_next_precedence() {token:?}"); + debug!("get_next_precedence() {:?}", token); // we only return some custom value here when the behaviour (not merely the numeric value) differs // from the default implementation @@ -135,6 +135,17 @@ impl Dialect for PostgreSqlDialect { } } + fn parse_statement(&self, parser: &mut Parser) -> Option> { + if parser.parse_keyword(Keyword::COMMENT) { + Some(parse_comment(parser)) + } else if parser.parse_keyword(Keyword::CREATE) { + parser.prev_token(); // unconsume the CREATE in case we don't end up parsing anything + parse_create(parser) + } else { + None + } + } + fn supports_filter_during_aggregation(&self) -> bool { true } @@ -145,7 +156,6 @@ impl Dialect for PostgreSqlDialect { fn prec_value(&self, prec: Precedence) -> u8 { match prec { - Precedence::Period => PERIOD_PREC, Precedence::DoubleColon => DOUBLE_COLON_PREC, Precedence::AtTz => AT_TZ_PREC, Precedence::MulDivModOp => MUL_DIV_MOD_OP_PREC, @@ -183,83 +193,82 @@ impl Dialect for PostgreSqlDialect { } /// see - /// see + fn supports_listen(&self) -> bool { + true + } + /// see - fn supports_listen_notify(&self) -> bool { - true - } - - /// see - fn supports_factorial_operator(&self) -> bool { - true - } - - /// see - fn supports_comment_on(&self) -> bool { - true - } - - /// See - fn supports_load_extension(&self) -> bool { - true - } - - /// See - /// - /// Required to support the colon in: - /// ```sql - /// SELECT json_object('a': 'b') - /// ``` - fn supports_named_fn_args_with_colon_operator(&self) -> bool { - true - } - - /// See - /// - /// Required to support the label in: - /// ```sql - /// SELECT json_object('label': 'value') - /// ``` - fn supports_named_fn_args_with_expr_name(&self) -> bool { - true - } - - /// Return true if the dialect supports empty projections in SELECT statements - /// - /// Example - /// ```sql - /// SELECT from table_name - /// ``` - fn supports_empty_projections(&self) -> bool { - true - } - - fn supports_nested_comments(&self) -> bool { - true - } - - fn supports_string_escape_constant(&self) -> bool { - true - } - - fn supports_numeric_literal_underscores(&self) -> bool { - true - } - - /// See: - fn supports_array_typedef_with_brackets(&self) -> bool { - true - } - - fn supports_geometric_types(&self) -> bool { - true - } - - fn supports_set_names(&self) -> bool { - true - } - - fn supports_alter_column_type_using(&self) -> bool { + fn supports_notify(&self) -> bool { true } } + +pub fn parse_comment(parser: &mut Parser) -> Result { + let if_exists = parser.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + + parser.expect_keyword(Keyword::ON)?; + let token = parser.next_token(); + + let (object_type, object_name) = match token.token { + Token::Word(w) if w.keyword == Keyword::COLUMN => { + let object_name = parser.parse_object_name(false)?; + (CommentObject::Column, object_name) + } + Token::Word(w) if w.keyword == Keyword::TABLE => { + let object_name = 
parser.parse_object_name(false)?; + (CommentObject::Table, object_name) + } + Token::Word(w) if w.keyword == Keyword::EXTENSION => { + let object_name = parser.parse_object_name(false)?; + (CommentObject::Extension, object_name) + } + _ => parser.expected("comment object_type", token)?, + }; + + parser.expect_keyword(Keyword::IS)?; + let comment = if parser.parse_keyword(Keyword::NULL) { + None + } else { + Some(parser.parse_literal_string()?) + }; + Ok(Statement::Comment { + object_type, + object_name, + comment, + if_exists, + }) +} + +pub fn parse_create(parser: &mut Parser) -> Option> { + let name = parser.maybe_parse(|parser| -> Result { + parser.expect_keyword(Keyword::CREATE)?; + parser.expect_keyword(Keyword::TYPE)?; + let name = parser.parse_object_name(false)?; + parser.expect_keyword(Keyword::AS)?; + parser.expect_keyword(Keyword::ENUM)?; + Ok(name) + }); + + match name { + Ok(name) => name.map(|name| parse_create_type_as_enum(parser, name)), + Err(e) => Some(Err(e)), + } +} + +// https://www.postgresql.org/docs/current/sql-createtype.html +pub fn parse_create_type_as_enum( + parser: &mut Parser, + name: ObjectName, +) -> Result { + if !parser.consume_token(&Token::LParen) { + return parser.expected("'(' after CREATE TYPE AS ENUM", parser.peek_token()); + } + + let labels = parser.parse_comma_separated0(|p| p.parse_identifier(false), Token::RParen)?; + parser.expect_token(&Token::RParen)?; + + Ok(Statement::CreateType { + name, + representation: UserDefinedTypeRepresentation::Enum { labels }, + }) +} diff --git a/src/dialect/redshift.rs b/src/dialect/redshift.rs index 8ffed98a..4d077384 100644 --- a/src/dialect/redshift.rs +++ b/src/dialect/redshift.rs @@ -32,63 +32,31 @@ pub struct RedshiftSqlDialect {} // in the Postgres dialect, the query will be parsed as an array, while in the Redshift dialect it will // be a json path impl Dialect for RedshiftSqlDialect { - /// Determine if a character starts a potential nested quoted identifier. - /// Example: RedShift supports the following quote styles to all mean the same thing: - /// ```sql - /// SELECT 1 AS foo; - /// SELECT 1 AS "foo"; - /// SELECT 1 AS [foo]; - /// SELECT 1 AS ["foo"]; - /// ``` - fn is_nested_delimited_identifier_start(&self, ch: char) -> bool { - ch == '[' + fn is_delimited_identifier_start(&self, ch: char) -> bool { + ch == '"' || ch == '[' } - /// Only applicable whenever [`Self::is_nested_delimited_identifier_start`] returns true - /// If the next sequence of tokens potentially represent a nested identifier, then this method - /// returns a tuple containing the outer quote style, and if present, the inner (nested) quote style. - /// - /// Example (Redshift): - /// ```text - /// `["foo"]` => Some(`[`, Some(`"`)) - /// `[foo]` => Some(`[`, None) - /// `[0]` => None - /// `"foo"` => None - /// ``` - fn peek_nested_delimited_identifier_quotes( - &self, - mut chars: Peekable>, - ) -> Option<(char, Option)> { - if chars.peek() != Some(&'[') { - return None; - } - + /// Determine if quoted characters are proper for identifier + /// It's needed to distinguish treating square brackets as quotes from + /// treating them as json path. If there is identifier then we assume + /// there is no json path. 
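Aside: the `parse_comment` path above handles `COMMENT ON ... IS ...` for Postgres. A hedged sketch of driving it through `Parser::parse_sql`; the object names are made up, and the `Result` is printed rather than unwrapped.

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Routed through the Postgres-specific COMMENT handling shown above.
    let sql = "COMMENT ON TABLE public.users IS 'application users'";
    let result = Parser::parse_sql(&PostgreSqlDialect {}, sql);
    println!("{result:?}");
}
```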
+ fn is_proper_identifier_inside_quotes(&self, mut chars: Peekable>) -> bool { chars.next(); - let mut not_white_chars = chars.skip_while(|ch| ch.is_whitespace()).peekable(); - if let Some(&ch) = not_white_chars.peek() { - if ch == '"' { - return Some(('[', Some('"'))); - } - if self.is_identifier_start(ch) { - return Some(('[', None)); - } + return self.is_identifier_start(ch); } - - None + false } fn is_identifier_start(&self, ch: char) -> bool { - // Extends Postgres dialect with sharp and UTF-8 multibyte chars - // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html - PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii() + // Extends Postgres dialect with sharp + PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' } fn is_identifier_part(&self, ch: char) -> bool { - // Extends Postgres dialect with sharp and UTF-8 multibyte chars - // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html - PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii() + // Extends Postgres dialect with sharp + PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' } /// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)` @@ -106,37 +74,4 @@ impl Dialect for RedshiftSqlDialect { fn supports_top_before_distinct(&self) -> bool { true } - - /// Redshift supports PartiQL: - fn supports_partiql(&self) -> bool { - true - } - - fn supports_string_escape_constant(&self) -> bool { - true - } - - fn supports_geometric_types(&self) -> bool { - true - } - - fn supports_array_typedef_with_brackets(&self) -> bool { - true - } - - fn allow_extract_single_quotes(&self) -> bool { - true - } - - fn supports_string_literal_backslash_escape(&self) -> bool { - true - } - - fn supports_select_wildcard_exclude(&self) -> bool { - true - } - - fn supports_select_exclude(&self) -> bool { - true - } } diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs index 3b1eff39..d9331d95 100644 --- a/src/dialect/snowflake.rs +++ b/src/dialect/snowflake.rs @@ -17,23 +17,20 @@ #[cfg(not(feature = "std"))] use crate::alloc::string::ToString; -use crate::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType, KeyValueOptions}; use crate::ast::helpers::stmt_create_table::CreateTableBuilder; use crate::ast::helpers::stmt_data_loading::{ - FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject, + DataLoadingOption, DataLoadingOptionType, DataLoadingOptions, StageLoadSelectItem, + StageParamsObject, }; use crate::ast::{ - ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident, - IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, - IdentityPropertyOrder, ObjectName, ObjectNamePart, RowAccessPolicy, ShowObjects, SqlOption, - Statement, TagsColumnOption, WrappedCollection, + ColumnOption, ColumnPolicy, ColumnPolicyProperty, Ident, IdentityParameters, IdentityProperty, + IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, ObjectName, + RowAccessPolicy, Statement, TagsColumnOption, WrappedCollection, }; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; -use crate::parser::{IsOptional, Parser, ParserError}; -use crate::tokenizer::{Token, Word}; -#[cfg(not(feature = "std"))] -use alloc::boxed::Box; +use crate::parser::{Parser, ParserError}; +use crate::tokenizer::Token; #[cfg(not(feature = "std"))] use alloc::string::String; #[cfg(not(feature = "std"))] @@ -41,10 +38,6 @@ use alloc::vec::Vec; #[cfg(not(feature = "std"))] use 
alloc::{format, vec}; -use super::keywords::RESERVED_FOR_IDENTIFIER; -use sqlparser::ast::StorageSerializationPolicy; - -const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT]; /// A [`Dialect`] for [Snowflake](https://www.snowflake.com/) #[derive(Debug, Default)] pub struct SnowflakeDialect; @@ -59,18 +52,6 @@ impl Dialect for SnowflakeDialect { true } - fn supports_from_trailing_commas(&self) -> bool { - true - } - - // Snowflake supports double-dot notation when the schema name is not specified - // In this case the default PUBLIC schema is used - // - // see https://docs.snowflake.com/en/sql-reference/name-resolution#resolution-when-schema-omitted-double-dot-notation - fn supports_object_name_double_dot_notation(&self) -> bool { - true - } - fn is_identifier_part(&self, ch: char) -> bool { ch.is_ascii_lowercase() || ch.is_ascii_uppercase() @@ -88,20 +69,10 @@ impl Dialect for SnowflakeDialect { true } - /// See - fn supports_outer_join_operator(&self) -> bool { - true - } - fn supports_connect_by(&self) -> bool { true } - /// See - fn supports_execute_immediate(&self) -> bool { - true - } - fn supports_match_recognize(&self) -> bool { true } @@ -125,26 +96,7 @@ impl Dialect for SnowflakeDialect { true } - /// See [doc](https://docs.snowflake.com/en/sql-reference/sql/comment) - fn supports_comment_on(&self) -> bool { - true - } - fn parse_statement(&self, parser: &mut Parser) -> Option> { - if parser.parse_keyword(Keyword::BEGIN) { - return Some(parser.parse_begin_exception_end()); - } - - if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) { - // ALTER SESSION - let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) { - Some(Keyword::SET) => true, - Some(Keyword::UNSET) => false, - _ => return Some(parser.expected("SET or UNSET", parser.peek_token())), - }; - return Some(parse_alter_session(parser, set)); - } - if parser.parse_keyword(Keyword::CREATE) { // possibly CREATE STAGE //[ OR REPLACE ] @@ -159,19 +111,16 @@ impl Dialect for SnowflakeDialect { let mut temporary = false; let mut volatile = false; let mut transient = false; - let mut iceberg = false; match parser.parse_one_of_keywords(&[ Keyword::TEMP, Keyword::TEMPORARY, Keyword::VOLATILE, Keyword::TRANSIENT, - Keyword::ICEBERG, ]) { Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true, Some(Keyword::VOLATILE) => volatile = true, Some(Keyword::TRANSIENT) => transient = true, - Some(Keyword::ICEBERG) => iceberg = true, _ => {} } @@ -180,7 +129,7 @@ impl Dialect for SnowflakeDialect { return Some(parse_create_stage(or_replace, temporary, parser)); } else if parser.parse_keyword(Keyword::TABLE) { return Some(parse_create_table( - or_replace, global, temporary, volatile, transient, iceberg, parser, + or_replace, global, temporary, volatile, transient, parser, )); } else { // need to go back with the cursor @@ -201,28 +150,6 @@ impl Dialect for SnowflakeDialect { return Some(parse_copy_into(parser)); } - if let Some(kw) = parser.parse_one_of_keywords(&[ - Keyword::LIST, - Keyword::LS, - Keyword::REMOVE, - Keyword::RM, - ]) { - return Some(parse_file_staging_command(kw, parser)); - } - - if parser.parse_keyword(Keyword::SHOW) { - let terse = parser.parse_keyword(Keyword::TERSE); - if parser.parse_keyword(Keyword::OBJECTS) { - return Some(parse_show_objects(terse, parser)); - } - //Give back Keyword::TERSE - if terse { - parser.prev_token(); - } - //Give back Keyword::SHOW - parser.prev_token(); - } - None } @@ -276,244 +203,16 @@ impl Dialect for 
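The `parse_statement` hook above routes Snowflake-specific `CREATE` forms (stages, and temporary/transient/volatile tables) into the dialect's own parsers. A small sketch of the `TRANSIENT` path, which both revisions keep; the table and column names are made up.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // Dispatched through the CREATE branch above into parse_create_table.
    let sql = "CREATE TRANSIENT TABLE t (id INT) COPY GRANTS";
    match Parser::parse_sql(&SnowflakeDialect {}, sql) {
        Ok(ast) => println!("ok:  {}", ast[0]),
        Err(e) => println!("err: {e}"),
    }
}
```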
SnowflakeDialect { fn allow_extract_single_quotes(&self) -> bool { true } - - /// Snowflake expects the `LIKE` option before the `IN` option, - /// for example: - fn supports_show_like_before_in(&self) -> bool { - true - } - - fn supports_left_associative_joins_without_parens(&self) -> bool { - false - } - - fn is_reserved_for_identifier(&self, kw: Keyword) -> bool { - // Unreserve some keywords that Snowflake accepts as identifiers - // See: https://docs.snowflake.com/en/sql-reference/reserved-keywords - if matches!(kw, Keyword::INTERVAL) { - false - } else { - RESERVED_FOR_IDENTIFIER.contains(&kw) - } - } - - fn supports_partiql(&self) -> bool { - true - } - - fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { - match kw { - // The following keywords can be considered an alias as long as - // they are not followed by other tokens that may change their meaning - // e.g. `SELECT * EXCEPT (col1) FROM tbl` - Keyword::EXCEPT - // e.g. `SELECT 1 LIMIT 5` - | Keyword::LIMIT - // e.g. `SELECT 1 OFFSET 5 ROWS` - | Keyword::OFFSET - // e.g. `INSERT INTO t SELECT 1 RETURNING *` - | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) => - { - false - } - - // `FETCH` can be considered an alias as long as it's not followed by `FIRST`` or `NEXT` - // which would give it a different meanings, for example: - // `SELECT 1 FETCH FIRST 10 ROWS` - not an alias - // `SELECT 1 FETCH 10` - not an alias - Keyword::FETCH if parser.peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]).is_some() - || matches!(parser.peek_token().token, Token::Number(_, _)) => - { - false - } - - // Reserved keywords by the Snowflake dialect, which seem to be less strictive - // than what is listed in `keywords::RESERVED_FOR_COLUMN_ALIAS`. The following - // keywords were tested with the this statement: `SELECT 1 `. - Keyword::FROM - | Keyword::GROUP - | Keyword::HAVING - | Keyword::INTERSECT - | Keyword::INTO - | Keyword::MINUS - | Keyword::ORDER - | Keyword::SELECT - | Keyword::UNION - | Keyword::WHERE - | Keyword::WITH => false, - - // Any other word is considered an alias - _ => true, - } - } - - fn is_table_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool { - match kw { - // The following keywords can be considered an alias as long as - // they are not followed by other tokens that may change their meaning - Keyword::LIMIT - | Keyword::RETURNING - | Keyword::INNER - | Keyword::USING - | Keyword::PIVOT - | Keyword::UNPIVOT - | Keyword::EXCEPT - | Keyword::MATCH_RECOGNIZE - | Keyword::OFFSET - if !matches!(parser.peek_token_ref().token, Token::SemiColon | Token::EOF) => - { - false - } - - // `FETCH` can be considered an alias as long as it's not followed by `FIRST`` or `NEXT` - // which would give it a different meanings, for example: - // `SELECT * FROM tbl FETCH FIRST 10 ROWS` - not an alias - // `SELECT * FROM tbl FETCH 10` - not an alias - Keyword::FETCH - if parser - .peek_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]) - .is_some() - || matches!(parser.peek_token().token, Token::Number(_, _)) => - { - false - } - - // All sorts of join-related keywords can be considered aliases unless additional - // keywords change their meaning. 
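The removed `is_column_alias` heuristic above decides whether a trailing keyword becomes an implicit alias or keeps its clause meaning. A sketch of the distinction it draws; the exact keyword set differs between the two revisions this diff moves between.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // `LIMIT 5` keeps its clause meaning, while the bare trailing word
    // in the second query is taken as an implicit column alias.
    for sql in ["SELECT 1 LIMIT 5", "SELECT 1 one"] {
        match Parser::parse_sql(&SnowflakeDialect {}, sql) {
            Ok(ast) => println!("ok:  {}", ast[0]),
            Err(e) => println!("err: {e}"),
        }
    }
}
```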
- Keyword::RIGHT | Keyword::LEFT | Keyword::SEMI | Keyword::ANTI - if parser - .peek_one_of_keywords(&[Keyword::JOIN, Keyword::OUTER]) - .is_some() => - { - false - } - Keyword::GLOBAL if parser.peek_keyword(Keyword::FULL) => false, - - // Reserved keywords by the Snowflake dialect, which seem to be less strictive - // than what is listed in `keywords::RESERVED_FOR_TABLE_ALIAS`. The following - // keywords were tested with the this statement: `SELECT .* FROM tbl `. - Keyword::WITH - | Keyword::ORDER - | Keyword::SELECT - | Keyword::WHERE - | Keyword::GROUP - | Keyword::HAVING - | Keyword::LATERAL - | Keyword::UNION - | Keyword::INTERSECT - | Keyword::MINUS - | Keyword::ON - | Keyword::JOIN - | Keyword::INNER - | Keyword::CROSS - | Keyword::FULL - | Keyword::LEFT - | Keyword::RIGHT - | Keyword::NATURAL - | Keyword::USING - | Keyword::ASOF - | Keyword::MATCH_CONDITION - | Keyword::SET - | Keyword::QUALIFY - | Keyword::FOR - | Keyword::START - | Keyword::CONNECT - | Keyword::SAMPLE - | Keyword::TABLESAMPLE - | Keyword::FROM => false, - - // Any other word is considered an alias - _ => true, - } - } - - /// See: - fn supports_timestamp_versioning(&self) -> bool { - true - } - - /// See: - fn supports_group_by_expr(&self) -> bool { - true - } - - /// See: - fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] { - &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR - } - - fn supports_space_separated_column_options(&self) -> bool { - true - } - - fn supports_comma_separated_drop_column_list(&self) -> bool { - true - } - - fn is_identifier_generating_function_name( - &self, - ident: &Ident, - name_parts: &[ObjectNamePart], - ) -> bool { - ident.quote_style.is_none() - && ident.value.to_lowercase() == "identifier" - && !name_parts - .iter() - .any(|p| matches!(p, ObjectNamePart::Function(_))) - } - - // For example: `SELECT IDENTIFIER('alias1').* FROM tbl AS alias1` - fn supports_select_expr_star(&self) -> bool { - true - } - - fn supports_select_wildcard_exclude(&self) -> bool { - true - } -} - -fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result { - let stage = parse_snowflake_stage_name(parser)?; - let pattern = if parser.parse_keyword(Keyword::PATTERN) { - parser.expect_token(&Token::Eq)?; - Some(parser.parse_literal_string()?) - } else { - None - }; - - match kw { - Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })), - Keyword::REMOVE | Keyword::RM => { - Ok(Statement::Remove(FileStagingCommand { stage, pattern })) - } - _ => Err(ParserError::ParserError( - "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(), - )), - } -} - -/// Parse snowflake alter session. -/// -fn parse_alter_session(parser: &mut Parser, set: bool) -> Result { - let session_options = parse_session_options(parser, set)?; - Ok(Statement::AlterSession { - set, - session_params: KeyValueOptions { - options: session_options, - }, - }) } /// Parse snowflake create table statement. 
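The removed helpers above (`parse_file_staging_command`, `parse_alter_session`) backed statements such as `LIST @stage` and `ALTER SESSION SET ...`. A hedged sketch: after this revert these forms are expected to be rejected, so the example only reports the outcome rather than asserting it. Stage name and session parameter are illustrative.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in ["LIST @my_stage", "ALTER SESSION SET QUERY_TAG = 'etl'"] {
        match Parser::parse_sql(&SnowflakeDialect {}, sql) {
            Ok(ast) => println!("ok:  {}", ast[0]),
            Err(e) => println!("err: {e}"),
        }
    }
}
```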
/// -/// pub fn parse_create_table( or_replace: bool, global: Option, temporary: bool, volatile: bool, transient: bool, - iceberg: bool, parser: &mut Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -525,7 +224,6 @@ pub fn parse_create_table( .temporary(temporary) .transient(transient) .volatile(volatile) - .iceberg(iceberg) .global(global) .hive_formats(Some(Default::default())); @@ -535,22 +233,18 @@ pub fn parse_create_table( // "CREATE TABLE x COPY GRANTS (c INT)" and "CREATE TABLE x (c INT) COPY GRANTS" are both // accepted by Snowflake - let mut plain_options = vec![]; - loop { let next_token = parser.next_token(); match &next_token.token { Token::Word(word) => match word.keyword { Keyword::COPY => { - parser.expect_keyword_is(Keyword::GRANTS)?; + parser.expect_keyword(Keyword::GRANTS)?; builder = builder.copy_grants(true); } Keyword::COMMENT => { // Rewind the COMMENT keyword parser.prev_token(); - if let Some(comment_def) = parser.parse_optional_inline_comment()? { - plain_options.push(SqlOption::Comment(comment_def)) - } + builder = builder.comment(parser.parse_optional_inline_comment()?); } Keyword::AS => { let query = parser.parse_query()?; @@ -568,10 +262,10 @@ pub fn parse_create_table( break; } Keyword::CLUSTER => { - parser.expect_keyword_is(Keyword::BY)?; + parser.expect_keyword(Keyword::BY)?; parser.expect_token(&Token::LParen)?; let cluster_by = Some(WrappedCollection::Parentheses( - parser.parse_comma_separated(|p| p.parse_expr())?, + parser.parse_comma_separated(|p| p.parse_identifier(false))?, )); parser.expect_token(&Token::RParen)?; @@ -631,16 +325,16 @@ pub fn parse_create_table( parser.prev_token(); } Keyword::AGGREGATION => { - parser.expect_keyword_is(Keyword::POLICY)?; + parser.expect_keyword(Keyword::POLICY)?; let aggregation_policy = parser.parse_object_name(false)?; builder = builder.with_aggregation_policy(Some(aggregation_policy)); } Keyword::ROW => { parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?; let policy = parser.parse_object_name(false)?; - parser.expect_keyword_is(Keyword::ON)?; + parser.expect_keyword(Keyword::ON)?; parser.expect_token(&Token::LParen)?; - let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; + let columns = parser.parse_comma_separated(|p| p.parse_identifier(false))?; parser.expect_token(&Token::RParen)?; builder = @@ -652,35 +346,6 @@ pub fn parse_create_table( parser.expect_token(&Token::RParen)?; builder = builder.with_tags(Some(tags)); } - Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => { - let on_commit = Some(parser.parse_create_table_on_commit()?); - builder = builder.on_commit(on_commit); - } - Keyword::EXTERNAL_VOLUME => { - parser.expect_token(&Token::Eq)?; - builder.external_volume = Some(parser.parse_literal_string()?); - } - Keyword::CATALOG => { - parser.expect_token(&Token::Eq)?; - builder.catalog = Some(parser.parse_literal_string()?); - } - Keyword::BASE_LOCATION => { - parser.expect_token(&Token::Eq)?; - builder.base_location = Some(parser.parse_literal_string()?); - } - Keyword::CATALOG_SYNC => { - parser.expect_token(&Token::Eq)?; - builder.catalog_sync = Some(parser.parse_literal_string()?); - } - Keyword::STORAGE_SERIALIZATION_POLICY => { - parser.expect_token(&Token::Eq)?; - - builder.storage_serialization_policy = - Some(parse_storage_serialization_policy(parser)?); - } - Keyword::IF if parser.parse_keywords(&[Keyword::NOT, Keyword::EXISTS]) => { - builder = builder.if_not_exists(true); - } _ => { return 
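The CREATE TABLE option loop above accepts Snowflake extensions such as `COPY GRANTS`, `CLUSTER BY`, and policy/tag clauses. A minimal sketch exercising the `CLUSTER BY` path; the table and column names are illustrative.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TABLE events (id INT, ts TIMESTAMP) CLUSTER BY (ts)";
    let ast = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("should parse");
    assert_eq!(ast.len(), 1);
    println!("{}", ast[0]);
}
```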
parser.expected("end of statement", next_token); } @@ -714,37 +379,10 @@ pub fn parse_create_table( } } } - let table_options = if !plain_options.is_empty() { - crate::ast::CreateTableOptions::Plain(plain_options) - } else { - crate::ast::CreateTableOptions::None - }; - - builder = builder.table_options(table_options); - - if iceberg && builder.base_location.is_none() { - return Err(ParserError::ParserError( - "BASE_LOCATION is required for ICEBERG tables".to_string(), - )); - } Ok(builder.build()) } -pub fn parse_storage_serialization_policy( - parser: &mut Parser, -) -> Result { - let next_token = parser.next_token(); - match &next_token.token { - Token::Word(w) => match w.keyword { - Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible), - Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized), - _ => parser.expected("storage_serialization_policy", next_token), - }, - _ => parser.expected("storage_serialization_policy", next_token), - } -} - pub fn parse_create_stage( or_replace: bool, temporary: bool, @@ -782,7 +420,10 @@ pub fn parse_create_stage( // [ comment ] if parser.parse_keyword(Keyword::COMMENT) { parser.expect_token(&Token::Eq)?; - comment = Some(parser.parse_comment_value()?); + comment = Some(match parser.next_token().token { + Token::SingleQuotedString(word) => Ok(word), + _ => parser.expected("a comment statement", parser.peek_token()), + }?) } Ok(Statement::CreateStage { @@ -791,13 +432,13 @@ pub fn parse_create_stage( if_not_exists, name, stage_params, - directory_table_params: KeyValueOptions { + directory_table_params: DataLoadingOptions { options: directory_table_params, }, - file_format: KeyValueOptions { + file_format: DataLoadingOptions { options: file_format, }, - copy_options: KeyValueOptions { + copy_options: DataLoadingOptions { options: copy_options, }, comment, @@ -808,7 +449,7 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result break, + Token::Whitespace(_) => break, Token::Period => { parser.prev_token(); break; @@ -821,8 +462,7 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result ident.push('~'), Token::Mod => ident.push('%'), Token::Div => ident.push('/'), - Token::Plus => ident.push('+'), - Token::Word(w) => ident.push_str(&w.to_string()), + Token::Word(w) => ident.push_str(&w.value), _ => return parser.expected("stage name identifier", parser.peek_token()), } } @@ -840,7 +480,7 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result { parser.prev_token(); @@ -849,70 +489,40 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result` -/// and `COPY INTO ` which have different syntax. pub fn parse_copy_into(parser: &mut Parser) -> Result { - let kind = match parser.peek_token().token { - // Indicates an internal stage - Token::AtSign => CopyIntoSnowflakeKind::Location, - // Indicates an external stage, i.e. 
s3://, gcs:// or azure:// - Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location, - _ => CopyIntoSnowflakeKind::Table, - }; - - let mut files: Vec = vec![]; - let mut from_transformations: Option> = None; - let mut from_stage_alias = None; - let mut from_stage = None; - let mut stage_params = StageParamsObject { - url: None, - encryption: KeyValueOptions { options: vec![] }, - endpoint: None, - storage_integration: None, - credentials: KeyValueOptions { options: vec![] }, - }; - let mut from_query = None; - let mut partition = None; - let mut file_format = Vec::new(); - let mut pattern = None; - let mut validation_mode = None; - let mut copy_options = Vec::new(); - let into: ObjectName = parse_snowflake_stage_name(parser)?; - if kind == CopyIntoSnowflakeKind::Location { - stage_params = parse_stage_params(parser)?; - } + let mut files: Vec = vec![]; + let mut from_transformations: Option> = None; + let from_stage_alias; + let from_stage: ObjectName; + let stage_params: StageParamsObject; - let into_columns = match &parser.peek_token().token { - Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?), - _ => None, - }; - - parser.expect_keyword_is(Keyword::FROM)?; + parser.expect_keyword(Keyword::FROM)?; + // check if data load transformations are present match parser.next_token().token { - Token::LParen if kind == CopyIntoSnowflakeKind::Table => { - // Data load with transformations - parser.expect_keyword_is(Keyword::SELECT)?; + Token::LParen => { + // data load with transformations + parser.expect_keyword(Keyword::SELECT)?; from_transformations = parse_select_items_for_data_load(parser)?; - parser.expect_keyword_is(Keyword::FROM)?; - from_stage = Some(parse_snowflake_stage_name(parser)?); + parser.expect_keyword(Keyword::FROM)?; + from_stage = parse_snowflake_stage_name(parser)?; stage_params = parse_stage_params(parser)?; - // Parse an optional alias - from_stage_alias = parser - .maybe_parse_table_alias()? - .map(|table_alias| table_alias.name); - parser.expect_token(&Token::RParen)?; - } - Token::LParen if kind == CopyIntoSnowflakeKind::Location => { - // Data unload with a query - from_query = Some(parser.parse_query()?); + // as + from_stage_alias = if parser.parse_keyword(Keyword::AS) { + Some(match parser.next_token().token { + Token::Word(w) => Ok(Ident::new(w.value)), + _ => parser.expected("stage alias", parser.peek_token()), + }?) 
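The rewritten `parse_copy_into` above distinguishes `COPY INTO <table>` from `COPY INTO <location>` by peeking at the target token (`@stage` or a quoted URL). A sketch of the plain table-load form, which both revisions accept; the object names and file-format value are illustrative.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "COPY INTO mytable FROM @my_stage FILE_FORMAT=(TYPE='CSV')";
    let ast = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("should parse");
    println!("{}", ast[0]);
}
```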
+ } else { + None + }; parser.expect_token(&Token::RParen)?; } _ => { parser.prev_token(); - from_stage = Some(parse_snowflake_stage_name(parser)?); + from_stage = parse_snowflake_stage_name(parser)?; stage_params = parse_stage_params(parser)?; // as @@ -925,180 +535,167 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { None }; } - } + }; - loop { - // FILE_FORMAT - if parser.parse_keyword(Keyword::FILE_FORMAT) { - parser.expect_token(&Token::Eq)?; - file_format = parse_parentheses_options(parser)?; - // PARTITION BY - } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { - partition = Some(Box::new(parser.parse_expr()?)) - // FILES - } else if parser.parse_keyword(Keyword::FILES) { - parser.expect_token(&Token::Eq)?; - parser.expect_token(&Token::LParen)?; - let mut continue_loop = true; - while continue_loop { - continue_loop = false; - let next_token = parser.next_token(); - match next_token.token { - Token::SingleQuotedString(s) => files.push(s), - _ => parser.expected("file token", next_token)?, - }; - if parser.next_token().token.eq(&Token::Comma) { - continue_loop = true; - } else { - parser.prev_token(); // not a comma, need to go back - } - } - parser.expect_token(&Token::RParen)?; - // PATTERN - } else if parser.parse_keyword(Keyword::PATTERN) { - parser.expect_token(&Token::Eq)?; + // [ files ] + if parser.parse_keyword(Keyword::FILES) { + parser.expect_token(&Token::Eq)?; + parser.expect_token(&Token::LParen)?; + let mut continue_loop = true; + while continue_loop { + continue_loop = false; let next_token = parser.next_token(); - pattern = Some(match next_token.token { - Token::SingleQuotedString(s) => s, - _ => parser.expected("pattern", next_token)?, - }); - // VALIDATION MODE - } else if parser.parse_keyword(Keyword::VALIDATION_MODE) { - parser.expect_token(&Token::Eq)?; - validation_mode = Some(parser.next_token().token.to_string()); - // COPY OPTIONS - } else if parser.parse_keyword(Keyword::COPY_OPTIONS) { - parser.expect_token(&Token::Eq)?; - copy_options = parse_parentheses_options(parser)?; - } else { - match parser.next_token().token { - Token::SemiColon | Token::EOF => break, - Token::Comma => continue, - // In `COPY INTO ` the copy options do not have a shared key - // like in `COPY INTO
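The transformation branch above handles `COPY INTO ... FROM (SELECT ...)`, where select items use the `[alias.]$<n>[:element]` stage-column syntax parsed by `parse_select_items_for_data_load`. A hedged sketch; the table, stage, and element names are made up.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // $1 refers to the first column of the staged file; `:name` drills into it.
    let sql = "COPY INTO people FROM (SELECT s.$1:name AS full_name FROM @json_stage AS s)";
    match Parser::parse_sql(&SnowflakeDialect {}, sql) {
        Ok(ast) => println!("ok:  {}", ast[0]),
        Err(e) => println!("err: {e}"),
    }
}
```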
` - Token::Word(key) => copy_options.push(parse_option(parser, key)?), - _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()), + match next_token.token { + Token::SingleQuotedString(s) => files.push(s), + _ => parser.expected("file token", next_token)?, + }; + if parser.next_token().token.eq(&Token::Comma) { + continue_loop = true; + } else { + parser.prev_token(); // not a comma, need to go back } } + parser.expect_token(&Token::RParen)?; + } + + // [ pattern ] + let mut pattern = None; + if parser.parse_keyword(Keyword::PATTERN) { + parser.expect_token(&Token::Eq)?; + let next_token = parser.next_token(); + pattern = Some(match next_token.token { + Token::SingleQuotedString(s) => s, + _ => parser.expected("pattern", next_token)?, + }); + } + + // [ file_format] + let mut file_format = Vec::new(); + if parser.parse_keyword(Keyword::FILE_FORMAT) { + parser.expect_token(&Token::Eq)?; + file_format = parse_parentheses_options(parser)?; + } + + // [ copy_options ] + let mut copy_options = Vec::new(); + if parser.parse_keyword(Keyword::COPY_OPTIONS) { + parser.expect_token(&Token::Eq)?; + copy_options = parse_parentheses_options(parser)?; + } + + // [ VALIDATION_MODE ] + let mut validation_mode = None; + if parser.parse_keyword(Keyword::VALIDATION_MODE) { + parser.expect_token(&Token::Eq)?; + validation_mode = Some(parser.next_token().token.to_string()); } Ok(Statement::CopyIntoSnowflake { - kind, into, - into_columns, - from_obj: from_stage, - from_obj_alias: from_stage_alias, + from_stage, + from_stage_alias, stage_params, from_transformations, - from_query, files: if files.is_empty() { None } else { Some(files) }, pattern, - file_format: KeyValueOptions { + file_format: DataLoadingOptions { options: file_format, }, - copy_options: KeyValueOptions { + copy_options: DataLoadingOptions { options: copy_options, }, validation_mode, - partition, }) } fn parse_select_items_for_data_load( parser: &mut Parser, -) -> Result>, ParserError> { - let mut select_items: Vec = vec![]; +) -> Result>, ParserError> { + // [.]$[.] [ , [.]$[.] ... ] + let mut select_items: Vec = vec![]; loop { - match parser.maybe_parse(parse_select_item_for_data_load)? { - // [.]$[.] [ , [.]$[.] ... 
] - Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)), - // Fallback, try to parse a standard SQL select item - None => select_items.push(StageLoadSelectItemKind::SelectItem( - parser.parse_select_item()?, - )), - } - if matches!(parser.peek_token_ref().token, Token::Comma) { - parser.advance_token(); - } else { - break; - } - } - Ok(Some(select_items)) -} + let mut alias: Option = None; + let mut file_col_num: i32 = 0; + let mut element: Option = None; + let mut item_as: Option = None; -fn parse_select_item_for_data_load( - parser: &mut Parser, -) -> Result { - let mut alias: Option = None; - let mut file_col_num: i32 = 0; - let mut element: Option = None; - let mut item_as: Option = None; - - let next_token = parser.next_token(); - match next_token.token { - Token::Placeholder(w) => { - file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { - ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) - })?; - Ok(()) - } - Token::Word(w) => { - alias = Some(Ident::new(w.value)); - Ok(()) - } - _ => parser.expected("alias or file_col_num", next_token), - }?; - - if alias.is_some() { - parser.expect_token(&Token::Period)?; - // now we get col_num token - let col_num_token = parser.next_token(); - match col_num_token.token { + let next_token = parser.next_token(); + match next_token.token { Token::Placeholder(w) => { file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) })?; Ok(()) } - _ => parser.expected("file_col_num", col_num_token), + Token::Word(w) => { + alias = Some(Ident::new(w.value)); + Ok(()) + } + _ => parser.expected("alias or file_col_num", next_token), }?; - } - // try extracting optional element - match parser.next_token().token { - Token::Colon => { - // parse element - element = Some(Ident::new(match parser.next_token().token { - Token::Word(w) => Ok(w.value), - _ => parser.expected("file_col_num", parser.peek_token()), - }?)); + if alias.is_some() { + parser.expect_token(&Token::Period)?; + // now we get col_num token + let col_num_token = parser.next_token(); + match col_num_token.token { + Token::Placeholder(w) => { + file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { + ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) + })?; + Ok(()) + } + _ => parser.expected("file_col_num", col_num_token), + }?; } - _ => { - // element not present move back - parser.prev_token(); + + // try extracting optional element + match parser.next_token().token { + Token::Colon => { + // parse element + element = Some(Ident::new(match parser.next_token().token { + Token::Word(w) => Ok(w.value), + _ => parser.expected("file_col_num", parser.peek_token()), + }?)); + } + _ => { + // element not present move back + parser.prev_token(); + } + } + + // as + if parser.parse_keyword(Keyword::AS) { + item_as = Some(match parser.next_token().token { + Token::Word(w) => Ok(Ident::new(w.value)), + _ => parser.expected("column item alias", parser.peek_token()), + }?); + } + + select_items.push(StageLoadSelectItem { + alias, + file_col_num, + element, + item_as, + }); + + match parser.next_token().token { + Token::Comma => { + // continue + } + _ => { + parser.prev_token(); // need to move back + break; + } } } - - // as - if parser.parse_keyword(Keyword::AS) { - item_as = Some(match parser.next_token().token { - Token::Word(w) => Ok(Ident::new(w.value)), - _ => parser.expected("column item alias", parser.peek_token()), - }?); - } - - 
Ok(StageLoadSelectItem { - alias, - file_col_num, - element, - item_as, - }) + Ok(Some(select_items)) } fn parse_stage_params(parser: &mut Parser) -> Result { let (mut url, mut storage_integration, mut endpoint) = (None, None, None); - let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![] }; - let mut credentials: KeyValueOptions = KeyValueOptions { options: vec![] }; + let mut encryption: DataLoadingOptions = DataLoadingOptions { options: vec![] }; + let mut credentials: DataLoadingOptions = DataLoadingOptions { options: vec![] }; // URL if parser.parse_keyword(Keyword::URL) { @@ -1127,7 +724,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result Result Result Result, ParserError> { - let mut options: Vec = Vec::new(); - let empty = String::new; - loop { - let next_token = parser.peek_token(); - match next_token.token { - Token::SemiColon | Token::EOF => break, - Token::Comma => { - parser.advance_token(); - continue; - } - Token::Word(key) => { - parser.advance_token(); - if set { - let option = parse_option(parser, key)?; - options.push(option); - } else { - options.push(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::STRING, - value: empty(), - }); - } - } - _ => { - return parser.expected("another option or end of statement", next_token); - } - } - } - if options.is_empty() { - Err(ParserError::ParserError( - "expected at least one option".to_string(), - )) - } else { - Ok(options) - } -} - /// Parses options provided within parentheses like: /// ( ENABLE = { TRUE | FALSE } /// [ AUTO_REFRESH = { TRUE | FALSE } ] /// [ REFRESH_ON_CREATE = { TRUE | FALSE } ] /// [ NOTIFICATION_INTEGRATION = '' ] ) /// -fn parse_parentheses_options(parser: &mut Parser) -> Result, ParserError> { - let mut options: Vec = Vec::new(); +fn parse_parentheses_options(parser: &mut Parser) -> Result, ParserError> { + let mut options: Vec = Vec::new(); + parser.expect_token(&Token::LParen)?; loop { match parser.next_token().token { Token::RParen => break, - Token::Comma => continue, - Token::Word(key) => options.push(parse_option(parser, key)?), - _ => return parser.expected("another option or ')'", parser.peek_token()), - }; + Token::Word(key) => { + parser.expect_token(&Token::Eq)?; + if parser.parse_keyword(Keyword::TRUE) { + options.push(DataLoadingOption { + option_name: key.value, + option_type: DataLoadingOptionType::BOOLEAN, + value: "TRUE".to_string(), + }); + Ok(()) + } else if parser.parse_keyword(Keyword::FALSE) { + options.push(DataLoadingOption { + option_name: key.value, + option_type: DataLoadingOptionType::BOOLEAN, + value: "FALSE".to_string(), + }); + Ok(()) + } else { + match parser.next_token().token { + Token::SingleQuotedString(value) => { + options.push(DataLoadingOption { + option_name: key.value, + option_type: DataLoadingOptionType::STRING, + value, + }); + Ok(()) + } + Token::Word(word) => { + options.push(DataLoadingOption { + option_name: key.value, + option_type: DataLoadingOptionType::ENUM, + value: word.value, + }); + Ok(()) + } + _ => parser.expected("expected option value", parser.peek_token()), + } + } + } + _ => parser.expected("another option or ')'", parser.peek_token()), + }?; } Ok(options) } -/// Parses a `KEY = VALUE` construct based on the specified key -fn parse_option(parser: &mut Parser, key: Word) -> Result { - parser.expect_token(&Token::Eq)?; - if parser.parse_keyword(Keyword::TRUE) { - Ok(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::BOOLEAN, - value: "TRUE".to_string(), - }) - } 
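`parse_stage_params` above collects the external-location properties of a stage (URL, STORAGE_INTEGRATION, ENDPOINT, CREDENTIALS, ENCRYPTION). A sketch with placeholder values; the credential and encryption keys are illustrative, not a documented minimum.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE STAGE ext_stage \
               URL='s3://bucket/prefix/' \
               CREDENTIALS=(AWS_KEY_ID='key' AWS_SECRET_KEY='secret') \
               ENCRYPTION=(TYPE='AWS_SSE_S3')";
    match Parser::parse_sql(&SnowflakeDialect {}, sql) {
        Ok(ast) => println!("ok:  {}", ast[0]),
        Err(e) => println!("err: {e}"),
    }
}
```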
else if parser.parse_keyword(Keyword::FALSE) { - Ok(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::BOOLEAN, - value: "FALSE".to_string(), - }) - } else { - match parser.next_token().token { - Token::SingleQuotedString(value) => Ok(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::STRING, - value, - }), - Token::Word(word) => Ok(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::ENUM, - value: word.value, - }), - Token::Number(n, _) => Ok(KeyValueOption { - option_name: key.value, - option_type: KeyValueOptionType::NUMBER, - value: n, - }), - _ => parser.expected("expected option value", parser.peek_token()), - } - } -} - /// Parsing a property of identity or autoincrement column option /// Syntax: /// ```sql @@ -1269,7 +821,7 @@ fn parse_identity_property(parser: &mut Parser) -> Result Result { - let policy_name = parser.parse_object_name(false)?; + let policy_name = parser.parse_identifier(false)?; let using_columns = if parser.parse_keyword(Keyword::USING) { parser.expect_token(&Token::LParen)?; - let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; + let columns = parser.parse_comma_separated(|p| p.parse_identifier(false))?; parser.expect_token(&Token::RParen)?; Some(columns) } else { @@ -1326,13 +878,3 @@ fn parse_column_tags(parser: &mut Parser, with: bool) -> Result -fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result { - let show_options = parser.parse_show_stmt_options()?; - Ok(Statement::ShowObjects(ShowObjects { - terse, - show_options, - })) -} diff --git a/src/dialect/sqlite.rs b/src/dialect/sqlite.rs index 847e0d13..95717f9f 100644 --- a/src/dialect/sqlite.rs +++ b/src/dialect/sqlite.rs @@ -15,11 +15,7 @@ // specific language governing permissions and limitations // under the License. -#[cfg(not(feature = "std"))] -use alloc::boxed::Box; - -use crate::ast::BinaryOperator; -use crate::ast::{Expr, Statement}; +use crate::ast::Statement; use crate::dialect::Dialect; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; @@ -74,27 +70,6 @@ impl Dialect for SQLiteDialect { } } - fn parse_infix( - &self, - parser: &mut crate::parser::Parser, - expr: &crate::ast::Expr, - _precedence: u8, - ) -> Option> { - // Parse MATCH and REGEXP as operators - // See - for (keyword, op) in [ - (Keyword::REGEXP, BinaryOperator::Regexp), - (Keyword::MATCH, BinaryOperator::Match), - ] { - if parser.parse_keyword(keyword) { - let left = Box::new(expr.clone()); - let right = Box::new(parser.parse_expr().unwrap()); - return Some(Ok(Expr::BinaryOp { left, op, right })); - } - } - None - } - fn supports_in_empty_list(&self) -> bool { true } @@ -106,8 +81,4 @@ impl Dialect for SQLiteDialect { fn supports_asc_desc_in_column_definition(&self) -> bool { true } - - fn supports_dollar_placeholder(&self) -> bool { - true - } } diff --git a/src/display_utils.rs b/src/display_utils.rs deleted file mode 100644 index ba36fccd..00000000 --- a/src/display_utils.rs +++ /dev/null @@ -1,135 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
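The removed `parse_infix` hook above taught the SQLite dialect to treat `REGEXP` and `MATCH` as binary operators. A hedged sketch of what it enabled; after this revert the same input is expected to be rejected, so the example just reports the result. Table and pattern are illustrative.

```rust
use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT name FROM users WHERE name REGEXP '^ab.*'";
    match Parser::parse_sql(&SQLiteDialect {}, sql) {
        Ok(ast) => println!("ok:  {}", ast[0]),
        Err(e) => println!("err: {e}"),
    }
}
```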
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -//! Utilities for formatting SQL AST nodes with pretty printing support. -//! -//! The module provides formatters that implement the `Display` trait with support -//! for both regular (`{}`) and pretty (`{:#}`) formatting modes. Pretty printing -//! adds proper indentation and line breaks to make SQL statements more readable. - -use core::fmt::{self, Display, Write}; - -/// A wrapper around a value that adds an indent to the value when displayed with {:#}. -pub(crate) struct Indent(pub T); - -const INDENT: &str = " "; - -impl Display for Indent -where - T: Display, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if f.alternate() { - f.write_str(INDENT)?; - write!(Indent(f), "{:#}", self.0) - } else { - self.0.fmt(f) - } - } -} - -/// Adds an indent to the inner writer -impl Write for Indent -where - T: Write, -{ - fn write_str(&mut self, s: &str) -> fmt::Result { - self.0.write_str(s)?; - // Our NewLine and SpaceOrNewline utils always print individual newlines as a single-character string. - if s == "\n" { - self.0.write_str(INDENT)?; - } - Ok(()) - } -} - -/// A value that inserts a newline when displayed with {:#}, but not when displayed with {}. -pub(crate) struct NewLine; - -impl Display for NewLine { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if f.alternate() { - f.write_char('\n') - } else { - Ok(()) - } - } -} - -/// A value that inserts a space when displayed with {}, but a newline when displayed with {:#}. -pub(crate) struct SpaceOrNewline; - -impl Display for SpaceOrNewline { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if f.alternate() { - f.write_char('\n') - } else { - f.write_char(' ') - } - } -} - -/// A value that displays a comma-separated list of values. -/// When pretty-printed (using {:#}), it displays each value on a new line. -pub(crate) struct DisplayCommaSeparated<'a, T: fmt::Display>(pub(crate) &'a [T]); - -impl fmt::Display for DisplayCommaSeparated<'_, T> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut first = true; - for t in self.0 { - if !first { - f.write_char(',')?; - SpaceOrNewline.fmt(f)?; - } - first = false; - t.fmt(f)?; - } - Ok(()) - } -} - -/// Displays a whitespace, followed by a comma-separated list that is indented when pretty-printed. -pub(crate) fn indented_list(f: &mut fmt::Formatter, items: &[T]) -> fmt::Result { - SpaceOrNewline.fmt(f)?; - Indent(DisplayCommaSeparated(items)).fmt(f) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_indent() { - struct TwoLines; - - impl Display for TwoLines { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("line 1")?; - SpaceOrNewline.fmt(f)?; - f.write_str("line 2") - } - } - - let indent = Indent(TwoLines); - assert_eq!( - indent.to_string(), - TwoLines.to_string(), - "Only the alternate form should be indented" - ); - assert_eq!(format!("{:#}", indent), " line 1\n line 2"); - } -} diff --git a/src/keywords.rs b/src/keywords.rs index 9e689a6d..d60227c9 100644 --- a/src/keywords.rs +++ b/src/keywords.rs @@ -18,14 +18,14 @@ //! This module defines //! 
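The deleted `display_utils` module above builds its pretty printing on `std::fmt`'s alternate flag (`{:#}`). A self-contained sketch of that underlying mechanism, independent of the deleted helpers; the strings are illustrative.

```rust
use core::fmt;

struct TwoForms;

impl fmt::Display for TwoForms {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.alternate() {
            // `{:#}` selects the multi-line, indented form.
            f.write_str("SELECT\n    a,\n    b")
        } else {
            f.write_str("SELECT a, b")
        }
    }
}

fn main() {
    assert_eq!(format!("{}", TwoForms), "SELECT a, b");
    assert_eq!(format!("{:#}", TwoForms), "SELECT\n    a,\n    b");
}
```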
1) a list of constants for every keyword //! 2) an `ALL_KEYWORDS` array with every keyword in it -//! This is not a list of *reserved* keywords: some of these can be -//! parsed as identifiers if the parser decides so. This means that -//! new keywords can be added here without affecting the parse result. +//! This is not a list of *reserved* keywords: some of these can be +//! parsed as identifiers if the parser decides so. This means that +//! new keywords can be added here without affecting the parse result. //! -//! As a matter of fact, most of these keywords are not used at all -//! and could be removed. +//! As a matter of fact, most of these keywords are not used at all +//! and could be removed. //! 3) a `RESERVED_FOR_TABLE_ALIAS` array with keywords reserved in a -//! "table alias" context. +//! "table alias" context. #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -74,19 +74,14 @@ macro_rules! define_keywords { define_keywords!( ABORT, ABS, - ABSENT, ABSOLUTE, ACCESS, - ACCOUNT, ACTION, ADD, ADMIN, AFTER, AGAINST, - AGGREGATE, AGGREGATION, - ALERT, - ALGORITHM, ALIAS, ALL, ALLOCATE, @@ -96,9 +91,7 @@ define_keywords!( AND, ANTI, ANY, - APPLICATION, APPLY, - APPLYBUDGET, ARCHIVE, ARE, ARRAY, @@ -112,44 +105,31 @@ define_keywords!( AT, ATOMIC, ATTACH, - AUDIT, - AUTHENTICATION, AUTHORIZATION, AUTO, - AUTOEXTEND_SIZE, AUTOINCREMENT, AUTO_INCREMENT, AVG, - AVG_ROW_LENGTH, AVRO, BACKWARD, BASE64, - BASE_LOCATION, BEFORE, BEGIN, BEGIN_FRAME, BEGIN_PARTITION, - BERNOULLI, BETWEEN, BIGDECIMAL, BIGINT, BIGNUMERIC, BINARY, - BIND, BINDING, - BIT, BLOB, - BLOCK, - BLOOM, BLOOMFILTER, BOOL, BOOLEAN, BOTH, - BOX, - BRIN, BROWSE, BTREE, - BUCKET, BUCKETS, BY, BYPASSRLS, @@ -162,11 +142,8 @@ define_keywords!( CASCADE, CASCADED, CASE, - CASES, CAST, CATALOG, - CATALOG_SYNC, - CATCH, CEIL, CEILING, CENTURY, @@ -176,21 +153,17 @@ define_keywords!( CHANNEL, CHAR, CHARACTER, - CHARACTERISTICS, CHARACTERS, CHARACTER_LENGTH, CHARSET, CHAR_LENGTH, CHECK, - CHECKSUM, - CIRCLE, CLEAR, CLOB, CLONE, CLOSE, CLUSTER, CLUSTERED, - CLUSTERING, COALESCE, COLLATE, COLLATION, @@ -202,7 +175,6 @@ define_keywords!( COMMENT, COMMIT, COMMITTED, - COMPATIBLE, COMPRESSION, COMPUTE, CONCURRENTLY, @@ -210,8 +182,6 @@ define_keywords!( CONFLICT, CONNECT, CONNECTION, - CONNECTOR, - CONNECT_BY_ROOT, CONSTRAINT, CONTAINS, CONTINUE, @@ -256,8 +226,6 @@ define_keywords!( DAY, DAYOFWEEK, DAYOFYEAR, - DAYS, - DCPROPERTIES, DEALLOCATE, DEC, DECADE, @@ -270,15 +238,12 @@ define_keywords!( DEFERRED, DEFINE, DEFINED, - DEFINER, DELAYED, - DELAY_KEY_WRITE, DELETE, DELIMITED, DELIMITER, DELTA, DENSE_RANK, - DENY, DEREF, DESC, DESCRIBE, @@ -293,7 +258,6 @@ define_keywords!( DISTRIBUTE, DIV, DO, - DOMAIN, DOUBLE, DOW, DOY, @@ -305,7 +269,6 @@ define_keywords!( ELEMENT, ELEMENTS, ELSE, - ELSEIF, EMPTY, ENABLE, ENABLE_SCHEMA_EVOLUTION, @@ -318,42 +281,32 @@ define_keywords!( END_PARTITION, ENFORCED, ENGINE, - ENGINE_ATTRIBUTE, ENUM, - ENUM16, - ENUM8, EPHEMERAL, EPOCH, EQUALS, ERROR, ESCAPE, ESCAPED, - ESTIMATE, EVENT, EVERY, - EVOLVE, EXCEPT, EXCEPTION, - EXCHANGE, EXCLUDE, EXCLUSIVE, EXEC, EXECUTE, - EXECUTION, EXISTS, EXP, EXPANSION, EXPLAIN, EXPLICIT, EXPORT, - EXTEND, EXTENDED, EXTENSION, EXTERNAL, - EXTERNAL_VOLUME, EXTRACT, FAIL, - FAILOVER, FALSE, FETCH, FIELDS, @@ -373,7 +326,6 @@ define_keywords!( FLOAT8, FLOOR, FLUSH, - FN, FOLLOWING, FOR, FORCE, @@ -389,20 +341,16 @@ define_keywords!( FREEZE, FROM, FSCK, - FULFILLMENT, FULL, FULLTEXT, FUNCTION, FUNCTIONS, FUSION, - FUTURE, GENERAL, GENERATE, GENERATED, 
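As the module comment above notes, listing a word in `define_keywords!` does not reserve it: many keywords still parse as plain identifiers. A quick illustration using the GenericDialect; the identifier choices are arbitrary.

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // VERSION and LOCATION are keywords, yet both parse here as column identifiers.
    let sql = "SELECT version, location FROM releases";
    let ast = Parser::parse_sql(&GenericDialect {}, sql).expect("should parse");
    println!("{}", ast[0]);
}
```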
GEOGRAPHY, GET, - GIN, - GIST, GLOBAL, GRANT, GRANTED, @@ -422,18 +370,13 @@ define_keywords!( HOSTS, HOUR, HOURS, - HUGEINT, - ICEBERG, ID, IDENTITY, - IDENTITY_INSERT, IF, IGNORE, ILIKE, IMMEDIATE, IMMUTABLE, - IMPORT, - IMPORTED, IN, INCLUDE, INCLUDE_NULL_VALUES, @@ -441,19 +384,14 @@ define_keywords!( INDEX, INDICATOR, INHERIT, - INHERITS, INITIALLY, INNER, INOUT, - INPATH, - INPLACE, INPUT, INPUTFORMAT, INSENSITIVE, INSERT, - INSERT_METHOD, INSTALL, - INSTANT, INSTEAD, INT, INT128, @@ -465,14 +403,11 @@ define_keywords!( INT64, INT8, INTEGER, - INTEGRATION, INTERPOLATE, INTERSECT, INTERSECTION, INTERVAL, INTO, - INVOKER, - IO, IS, ISODOW, ISOLATION, @@ -488,7 +423,6 @@ define_keywords!( JULIAN, KEY, KEYS, - KEY_BLOCK_SIZE, KILL, LAG, LANGUAGE, @@ -503,11 +437,8 @@ define_keywords!( LIKE, LIKE_REGEX, LIMIT, - LINE, LINES, - LIST, LISTEN, - LISTING, LN, LOAD, LOCAL, @@ -516,19 +447,12 @@ define_keywords!( LOCATION, LOCK, LOCKED, - LOG, LOGIN, LOGS, - LONGBLOB, - LONGTEXT, LOWCARDINALITY, LOWER, LOW_PRIORITY, - LS, - LSEG, MACRO, - MANAGE, - MANAGED, MANAGEDLOCATION, MAP, MASKING, @@ -542,17 +466,12 @@ define_keywords!( MAX, MAXVALUE, MAX_DATA_EXTENSION_TIME_IN_DAYS, - MAX_ROWS, MEASURES, - MEDIUMBLOB, MEDIUMINT, - MEDIUMTEXT, MEMBER, MERGE, - MESSAGE, METADATA, METHOD, - METRIC, MICROSECOND, MICROSECONDS, MILLENIUM, @@ -560,24 +479,18 @@ define_keywords!( MILLISECOND, MILLISECONDS, MIN, - MINUS, MINUTE, - MINUTES, MINVALUE, - MIN_ROWS, MOD, MODE, MODIFIES, MODIFY, MODULE, - MONITOR, MONTH, - MONTHS, MSCK, MULTISET, MUTATION, NAME, - NAMES, NANOSECOND, NANOSECONDS, NATIONAL, @@ -585,13 +498,8 @@ define_keywords!( NCHAR, NCLOB, NESTED, - NETWORK, NEW, NEXT, - NFC, - NFD, - NFKC, - NFKD, NO, NOBYPASSRLS, NOCREATEDB, @@ -602,7 +510,6 @@ define_keywords!( NOORDER, NOREPLICATION, NORMALIZE, - NORMALIZED, NOSCAN, NOSUPERUSER, NOT, @@ -619,26 +526,19 @@ define_keywords!( NUMERIC, NVARCHAR, OBJECT, - OBJECTS, OCCURRENCES_REGEX, OCTETS, OCTET_LENGTH, OF, - OFF, OFFSET, - OFFSETS, OLD, OMIT, ON, ONE, ONLY, OPEN, - OPENJSON, - OPERATE, OPERATOR, - OPTIMIZATION, OPTIMIZE, - OPTIMIZED, OPTIMIZER_COSTS, OPTION, OPTIONS, @@ -646,24 +546,16 @@ define_keywords!( ORC, ORDER, ORDINALITY, - ORGANIZATION, - OTHER, OUT, OUTER, - OUTPUT, OUTPUTFORMAT, OVER, OVERFLOW, OVERLAPS, OVERLAY, - OVERRIDE, OVERWRITE, OWNED, OWNER, - OWNERSHIP, - PACKAGE, - PACKAGES, - PACK_KEYS, PARALLEL, PARAMETER, PARQUET, @@ -671,7 +563,6 @@ define_keywords!( PARTITION, PARTITIONED, PARTITIONS, - PASSING, PASSWORD, PAST, PATH, @@ -688,10 +579,7 @@ define_keywords!( PLACING, PLAN, PLANS, - POINT, POLICY, - POLYGON, - POOL, PORTION, POSITION, POSITION_REGEX, @@ -704,22 +592,16 @@ define_keywords!( PRESERVE, PREWHERE, PRIMARY, - PRINT, PRIOR, PRIVILEGES, PROCEDURE, - PROFILE, PROGRAM, PROJECTION, - PUBLIC, - PURCHASE, PURGE, QUALIFY, QUARTER, QUERY, QUOTE, - RAISE, - RAISERROR, RANGE, RANK, RAW, @@ -728,7 +610,6 @@ define_keywords!( READS, READ_ONLY, REAL, - RECLUSTER, RECURSIVE, REF, REFERENCES, @@ -747,29 +628,22 @@ define_keywords!( RELATIVE, RELAY, RELEASE, - RELEASES, REMOTE, - REMOVE, RENAME, REORG, REPAIR, REPEATABLE, REPLACE, REPLICA, - REPLICATE, REPLICATION, RESET, - RESOLVE, - RESOURCE, RESPECT, RESTART, RESTRICT, RESTRICTED, - RESTRICTIONS, RESTRICTIVE, RESULT, RESULTSET, - RESUME, RETAIN, RETURN, RETURNING, @@ -777,22 +651,18 @@ define_keywords!( REVOKE, RIGHT, RLIKE, - RM, ROLE, - ROLES, ROLLBACK, ROLLUP, ROOT, ROW, ROWID, ROWS, - ROW_FORMAT, ROW_NUMBER, RULE, RUN, SAFE, SAFE_CAST, - SAMPLE, SAVEPOINT, SCHEMA, SCHEMAS, @@ 
-800,12 +670,8 @@ define_keywords!( SCROLL, SEARCH, SECOND, - SECONDARY, - SECONDARY_ENGINE_ATTRIBUTE, - SECONDS, SECRET, SECURITY, - SEED, SELECT, SEMI, SENSITIVE, @@ -816,19 +682,13 @@ define_keywords!( SERDE, SERDEPROPERTIES, SERIALIZABLE, - SERVER, - SERVICE, SESSION, SESSION_USER, SET, - SETERROR, SETS, SETTINGS, SHARE, - SHARED, - SHARING, SHOW, - SIGNED, SIMILAR, SKIP, SLOW, @@ -841,47 +701,35 @@ define_keywords!( SPATIAL, SPECIFIC, SPECIFICTYPE, - SPGIST, SQL, SQLEXCEPTION, SQLSTATE, SQLWARNING, SQRT, - SRID, STABLE, STAGE, START, - STARTS, STATEMENT, STATIC, STATISTICS, - STATS_AUTO_RECALC, - STATS_PERSISTENT, - STATS_SAMPLE_PAGES, STATUS, STDDEV_POP, STDDEV_SAMP, STDIN, STDOUT, STEP, - STORAGE, STORAGE_INTEGRATION, - STORAGE_SERIALIZATION_POLICY, STORED, - STRAIGHT_JOIN, STRICT, STRING, STRUCT, SUBMULTISET, - SUBSTR, SUBSTRING, SUBSTRING_REGEX, SUCCEEDS, SUM, SUPER, SUPERUSER, - SUPPORT, - SUSPEND, SWAP, SYMMETRIC, SYNC, @@ -891,16 +739,12 @@ define_keywords!( TABLE, TABLES, TABLESAMPLE, - TABLESPACE, TAG, TARGET, - TASK, TBLPROPERTIES, TEMP, TEMPORARY, - TEMPTABLE, TERMINATED, - TERSE, TEXT, TEXTFILE, THEN, @@ -908,20 +752,16 @@ define_keywords!( TIME, TIMESTAMP, TIMESTAMPTZ, - TIMESTAMP_NTZ, TIMETZ, TIMEZONE, TIMEZONE_ABBR, TIMEZONE_HOUR, TIMEZONE_MINUTE, TIMEZONE_REGION, - TINYBLOB, TINYINT, - TINYTEXT, TO, TOP, TOTALS, - TRACE, TRAILING, TRANSACTION, TRANSIENT, @@ -934,16 +774,11 @@ define_keywords!( TRIM_ARRAY, TRUE, TRUNCATE, - TRY, TRY_CAST, TRY_CONVERT, - TSQUERY, - TSVECTOR, TUPLE, TYPE, - UBIGINT, UESCAPE, - UHUGEINT, UINT128, UINT16, UINT256, @@ -953,12 +788,10 @@ define_keywords!( UNBOUNDED, UNCACHE, UNCOMMITTED, - UNDEFINED, UNFREEZE, UNION, UNIQUE, UNKNOWN, - UNLISTEN, UNLOAD, UNLOCK, UNLOGGED, @@ -966,7 +799,6 @@ define_keywords!( UNNEST, UNPIVOT, UNSAFE, - UNSET, UNSIGNED, UNTIL, UPDATE, @@ -977,18 +809,14 @@ define_keywords!( USER, USER_RESOURCES, USING, - USMALLINT, - UTINYINT, UUID, VACUUM, VALID, - VALIDATE, VALIDATION_MODE, VALUE, VALUES, VALUE_OF, VARBINARY, - VARBIT, VARCHAR, VARIABLES, VARYING, @@ -997,20 +825,15 @@ define_keywords!( VERBOSE, VERSION, VERSIONING, - VERSIONS, VIEW, VIEWS, VIRTUAL, VOLATILE, - VOLUME, WAREHOUSE, - WAREHOUSES, WEEK, - WEEKS, WHEN, WHENEVER, WHERE, - WHILE, WIDTH_BUCKET, WINDOW, WITH, @@ -1018,14 +841,10 @@ define_keywords!( WITHOUT, WITHOUT_ARRAY_WRAPPER, WORK, - WRAPPER, WRITE, XML, - XMLNAMESPACES, - XMLTABLE, XOR, YEAR, - YEARS, ZONE, ZORDER ); @@ -1054,7 +873,6 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::UNION, Keyword::EXCEPT, Keyword::INTERSECT, - Keyword::MINUS, // Reserved only as a table alias in the `FROM`/`JOIN` clauses: Keyword::ON, Keyword::JOIN, @@ -1068,11 +886,6 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::CLUSTER, Keyword::DISTRIBUTE, Keyword::GLOBAL, - Keyword::ANTI, - Keyword::SEMI, - Keyword::RETURNING, - Keyword::ASOF, - Keyword::MATCH_CONDITION, // for MSSQL-specific OUTER APPLY (seems reserved in most dialects) Keyword::OUTER, Keyword::SET, @@ -1084,18 +897,15 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::PARTITION, // for Clickhouse PREWHERE Keyword::PREWHERE, + // for ClickHouse SELECT * FROM t SETTINGS ... Keyword::SETTINGS, + // for ClickHouse SELECT * FROM t FORMAT... Keyword::FORMAT, // for Snowflake START WITH .. 
CONNECT BY Keyword::START, Keyword::CONNECT, // Reserved for snowflake MATCH_RECOGNIZE Keyword::MATCH_RECOGNIZE, - // Reserved for Snowflake table sample - Keyword::SAMPLE, - Keyword::TABLESAMPLE, - Keyword::FROM, - Keyword::OPEN, ]; /// Can't be used as a column alias, so that `SELECT alias` @@ -1119,9 +929,7 @@ pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ Keyword::FETCH, Keyword::UNION, Keyword::EXCEPT, - Keyword::EXCLUDE, Keyword::INTERSECT, - Keyword::MINUS, Keyword::CLUSTER, Keyword::DISTRIBUTE, Keyword::RETURNING, @@ -1130,23 +938,3 @@ pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ Keyword::INTO, Keyword::END, ]; - -// Global list of reserved keywords allowed after FROM. -// Parser should call Dialect::get_reserved_keyword_after_from -// to allow for each dialect to customize the list. -pub const RESERVED_FOR_TABLE_FACTOR: &[Keyword] = &[ - Keyword::INTO, - Keyword::LIMIT, - Keyword::HAVING, - Keyword::WHERE, -]; - -/// Global list of reserved keywords that cannot be parsed as identifiers -/// without special handling like quoting. Parser should call `Dialect::is_reserved_for_identifier` -/// to allow for each dialect to customize the list. -pub const RESERVED_FOR_IDENTIFIER: &[Keyword] = &[ - Keyword::EXISTS, - Keyword::INTERVAL, - Keyword::STRUCT, - Keyword::TRIM, -]; diff --git a/src/lib.rs b/src/lib.rs index dbfd1791..6c8987b6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,9 +25,6 @@ //! 1. [`Parser::parse_sql`] and [`Parser::new`] for the Parsing API //! 2. [`ast`] for the AST structure //! 3. [`Dialect`] for supported SQL dialects -//! 4. [`Spanned`] for source text locations (see "Source Spans" below for details) -//! -//! [`Spanned`]: ast::Spanned //! //! # Example parsing SQL text //! @@ -65,94 +62,15 @@ //! assert_eq!(ast[0].to_string(), sql); //! ``` //! -//! # Pretty Printing -//! -//! SQL statements can be pretty-printed with proper indentation and line breaks using the alternate flag (`{:#}`): -//! -//! ``` -//! # use sqlparser::dialect::GenericDialect; -//! # use sqlparser::parser::Parser; -//! let sql = "SELECT a, b FROM table_1"; -//! let ast = Parser::parse_sql(&GenericDialect, sql).unwrap(); -//! -//! // Pretty print with indentation and line breaks -//! let pretty_sql = format!("{:#}", ast[0]); -//! assert_eq!(pretty_sql, r#" -//! SELECT -//! a, -//! b -//! FROM -//! table_1 -//! "#.trim()); -//! ``` //! [sqlparser crates.io page]: https://crates.io/crates/sqlparser //! [`Parser::parse_sql`]: crate::parser::Parser::parse_sql //! [`Parser::new`]: crate::parser::Parser::new //! [`AST`]: crate::ast //! [`ast`]: crate::ast //! [`Dialect`]: crate::dialect::Dialect -//! -//! # Source Spans -//! -//! Starting with version `0.53.0` sqlparser introduced source spans to the -//! AST. This feature provides source information for syntax errors, enabling -//! better error messages. See [issue #1548] for more information and the -//! [`Spanned`] trait to access the spans. -//! -//! [issue #1548]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 -//! [`Spanned`]: ast::Spanned -//! -//! ## Migration Guide -//! -//! For the next few releases, we will be incrementally adding source spans to the -//! AST nodes, trying to minimize the impact on existing users. Some breaking -//! changes are inevitable, and the following is a summary of the changes: -//! -//! #### New fields for spans (must be added to any existing pattern matches) -//! -//! The primary change is that new fields will be added to AST nodes to store the source `Span` or `TokenWithLocation`. 
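The `RESERVED_FOR_TABLE_ALIAS` list above keeps clause-introducing keywords from being swallowed as implicit table aliases. A small sketch of the effect; it shows the general idea rather than asserting anything about the exact keyword set on either side of this diff.

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // WHERE is reserved as a table alias, so it still starts the selection here,
    // while `o` is accepted as an implicit alias for `orders`.
    let sql = "SELECT o.id FROM orders o WHERE o.id > 10";
    let ast = Parser::parse_sql(&GenericDialect {}, sql).expect("should parse");
    println!("{}", ast[0]);
}
```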
-//! -//! This will require -//! 1. Adding new fields to existing pattern matches. -//! 2. Filling in the proper span information when constructing AST nodes. -//! -//! For example, since `Ident` now stores a `Span`, to construct an `Ident` you -//! must provide now provide one: -//! -//! Previously: -//! ```text -//! # use sqlparser::ast::Ident; -//! Ident { -//! value: "name".into(), -//! quote_style: None, -//! } -//! ``` -//! Now -//! ```rust -//! # use sqlparser::ast::Ident; -//! # use sqlparser::tokenizer::Span; -//! Ident { -//! value: "name".into(), -//! quote_style: None, -//! span: Span::empty(), -//! }; -//! ``` -//! -//! Similarly, when pattern matching on `Ident`, you must now account for the -//! `span` field. -//! -//! #### Misc. -//! - [`TokenWithLocation`] stores a full `Span`, rather than just a source location. -//! Users relying on `token.location` should use `token.location.start` instead. -//! -//![`TokenWithLocation`]: tokenizer::TokenWithLocation #![cfg_attr(not(feature = "std"), no_std)] #![allow(clippy::upper_case_acronyms)] -// Permit large enum variants to keep a unified, expressive AST. -// Splitting complex nodes (expressions, statements, types) into separate types -// would bloat the API and hide intent. Extra memory is a worthwhile tradeoff. -#![allow(clippy::large_enum_variant)] // Allow proc-macros to find this crate extern crate self as sqlparser; @@ -167,7 +85,6 @@ extern crate pretty_assertions; pub mod ast; #[macro_use] pub mod dialect; -mod display_utils; pub mod keywords; pub mod parser; pub mod tokenizer; diff --git a/src/parser/alter.rs b/src/parser/alter.rs index bff462ee..53410579 100644 --- a/src/parser/alter.rs +++ b/src/parser/alter.rs @@ -18,15 +18,15 @@ use alloc::vec; use super::{Parser, ParserError}; use crate::{ ast::{ - AlterConnectorOwner, AlterPolicyOperation, AlterRoleOperation, Expr, Password, ResetConfig, - RoleOption, SetConfigValue, Statement, + AlterPolicyOperation, AlterRoleOperation, Expr, Password, ResetConfig, RoleOption, + SetConfigValue, Statement, }, dialect::{MsSqlDialect, PostgreSqlDialect}, keywords::Keyword, tokenizer::Token, }; -impl Parser<'_> { +impl<'a> Parser<'a> { pub fn parse_alter_role(&mut self) -> Result { if dialect_of!(self is PostgreSqlDialect) { return self.parse_pg_alter_role(); @@ -51,13 +51,13 @@ impl Parser<'_> { /// /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-alterpolicy.html) pub fn parse_alter_policy(&mut self) -> Result { - let name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::ON)?; + let name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::ON)?; let table_name = self.parse_object_name(false)?; if self.parse_keyword(Keyword::RENAME) { - self.expect_keyword_is(Keyword::TO)?; - let new_name = self.parse_identifier()?; + self.expect_keyword(Keyword::TO)?; + let new_name = self.parse_identifier(false)?; Ok(Statement::AlterPolicy { name, table_name, @@ -99,59 +99,18 @@ impl Parser<'_> { } } - /// Parse an `ALTER CONNECTOR` statement - /// ```sql - /// ALTER CONNECTOR connector_name SET DCPROPERTIES(property_name=property_value, ...); - /// - /// ALTER CONNECTOR connector_name SET URL new_url; - /// - /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; - /// ``` - pub fn parse_alter_connector(&mut self) -> Result { - let name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::SET)?; - - let properties = match self.parse_options_with_keywords(&[Keyword::DCPROPERTIES])? 
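`parse_alter_policy` above handles the PostgreSQL `ALTER POLICY ... ON ... RENAME TO ...` form. A minimal sketch; the policy and table names are placeholders.

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER POLICY p0 ON accounts RENAME TO p1";
    match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
        Ok(ast) => println!("ok:  {}", ast[0]),
        Err(e) => println!("err: {e}"),
    }
}
```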
{ - properties if !properties.is_empty() => Some(properties), - _ => None, - }; - - let url = if self.parse_keyword(Keyword::URL) { - Some(self.parse_literal_string()?) - } else { - None - }; - - let owner = if self.parse_keywords(&[Keyword::OWNER, Keyword::USER]) { - let owner = self.parse_identifier()?; - Some(AlterConnectorOwner::User(owner)) - } else if self.parse_keywords(&[Keyword::OWNER, Keyword::ROLE]) { - let owner = self.parse_identifier()?; - Some(AlterConnectorOwner::Role(owner)) - } else { - None - }; - - Ok(Statement::AlterConnector { - name, - properties, - url, - owner, - }) - } - fn parse_mssql_alter_role(&mut self) -> Result { - let role_name = self.parse_identifier()?; + let role_name = self.parse_identifier(false)?; let operation = if self.parse_keywords(&[Keyword::ADD, Keyword::MEMBER]) { - let member_name = self.parse_identifier()?; + let member_name = self.parse_identifier(false)?; AlterRoleOperation::AddMember { member_name } } else if self.parse_keywords(&[Keyword::DROP, Keyword::MEMBER]) { - let member_name = self.parse_identifier()?; + let member_name = self.parse_identifier(false)?; AlterRoleOperation::DropMember { member_name } } else if self.parse_keywords(&[Keyword::WITH, Keyword::NAME]) { if self.consume_token(&Token::Eq) { - let role_name = self.parse_identifier()?; + let role_name = self.parse_identifier(false)?; AlterRoleOperation::RenameRole { role_name } } else { return self.expected("= after WITH NAME ", self.peek_token()); @@ -167,7 +126,7 @@ impl Parser<'_> { } fn parse_pg_alter_role(&mut self) -> Result { - let role_name = self.parse_identifier()?; + let role_name = self.parse_identifier(false)?; // [ IN DATABASE _`database_name`_ ] let in_database = if self.parse_keywords(&[Keyword::IN, Keyword::DATABASE]) { @@ -178,7 +137,7 @@ impl Parser<'_> { let operation = if self.parse_keyword(Keyword::RENAME) { if self.parse_keyword(Keyword::TO) { - let role_name = self.parse_identifier()?; + let role_name = self.parse_identifier(false)?; AlterRoleOperation::RenameRole { role_name } } else { return self.expected("TO after RENAME", self.peek_token()); @@ -273,7 +232,7 @@ impl Parser<'_> { Some(Keyword::BYPASSRLS) => RoleOption::BypassRLS(true), Some(Keyword::NOBYPASSRLS) => RoleOption::BypassRLS(false), Some(Keyword::CONNECTION) => { - self.expect_keyword_is(Keyword::LIMIT)?; + self.expect_keyword(Keyword::LIMIT)?; RoleOption::ConnectionLimit(Expr::Value(self.parse_number_value()?)) } Some(Keyword::CREATEDB) => RoleOption::CreateDB(true), @@ -297,7 +256,7 @@ impl Parser<'_> { Some(Keyword::SUPERUSER) => RoleOption::SuperUser(true), Some(Keyword::NOSUPERUSER) => RoleOption::SuperUser(false), Some(Keyword::VALID) => { - self.expect_keyword_is(Keyword::UNTIL)?; + self.expect_keyword(Keyword::UNTIL)?; RoleOption::ValidUntil(Expr::Value(self.parse_value()?)) } _ => self.expected("option", self.peek_token())?, diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 47b63da8..2bd45436 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -24,7 +24,6 @@ use core::{ fmt::{self, Display}, str::FromStr, }; -use helpers::attached_token::AttachedToken; use log::debug; @@ -48,6 +47,9 @@ pub enum ParserError { RecursionLimitExceeded, } +// avoid clippy type_complexity warnings +type ParsedAction = (Keyword, Option>); + // Use `Parser::expected` instead, if possible macro_rules! 
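`parse_mssql_alter_role` above covers `ADD MEMBER`, `DROP MEMBER`, and `WITH NAME =`. A sketch of the rename form under the MsSqlDialect; the role names are illustrative.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER ROLE old_role WITH NAME = new_role";
    let ast = Parser::parse_sql(&MsSqlDialect {}, sql).expect("should parse");
    println!("{}", ast[0]);
}
```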
parser_err { ($MSG:expr, $loc:expr) => { @@ -70,9 +72,6 @@ mod recursion { /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust /// borrow checker so the automatic [`DepthGuard`] decrement a /// reference to the counter. - /// - /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection - /// for some of its recursive methods. See [`recursive::recursive`] for more information. pub(crate) struct RecursionCounter { remaining_depth: Rc>, } @@ -186,15 +185,6 @@ impl std::error::Error for ParserError {} // By default, allow expressions up to this deep before erroring const DEFAULT_REMAINING_DEPTH: usize = 50; -// A constant EOF token that can be referenced. -const EOF_TOKEN: TokenWithSpan = TokenWithSpan { - token: Token::EOF, - span: Span { - start: Location { line: 0, column: 0 }, - end: Location { line: 0, column: 0 }, - }, -}; - /// Composite types declarations using angle brackets syntax can be arbitrary /// nested such that the following declaration is possible: /// `ARRAY>` @@ -222,9 +212,6 @@ pub struct ParserOptions { /// Controls how literal values are unescaped. See /// [`Tokenizer::with_unescape`] for more details. pub unescape: bool, - /// Controls if the parser expects a semi-colon token - /// between statements. Default is `true`. - pub require_semicolon_stmt_delimiter: bool, } impl Default for ParserOptions { @@ -232,7 +219,6 @@ impl Default for ParserOptions { Self { trailing_commas: false, unescape: true, - require_semicolon_stmt_delimiter: true, } } } @@ -277,58 +263,19 @@ enum ParserState { ConnectBy, } -/// A SQL Parser -/// -/// This struct is the main entry point for parsing SQL queries. -/// -/// # Functionality: -/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`] -/// * Controlling recursion: See [`Parser::with_recursion_limit`] -/// * Controlling parser options: See [`Parser::with_options`] -/// * Providing your own tokens: See [`Parser::with_tokens`] -/// -/// # Internals -/// -/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a -/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token -/// being processed. The token vec may contain multiple SQL statements. -/// -/// * The "current" token is the token at `index - 1` -/// * The "next" token is the token at `index` -/// * The "previous" token is the token at `index - 2` -/// -/// If `index` is equal to the length of the token stream, the 'next' token is -/// [`Token::EOF`]. -/// -/// For example, the SQL string "SELECT * FROM foo" will be tokenized into -/// following tokens: -/// ```text -/// [ -/// "SELECT", // token index 0 -/// " ", // whitespace -/// "*", -/// " ", -/// "FROM", -/// " ", -/// "foo" -/// ] -/// ``` -/// -/// pub struct Parser<'a> { - /// The tokens - tokens: Vec, + tokens: Vec, /// The index of the first unprocessed token in [`Parser::tokens`]. index: usize, /// The current state of the parser. state: ParserState, - /// The SQL dialect to use. + /// The current dialect to use. dialect: &'a dyn Dialect, /// Additional options that allow you to mix & match behavior /// otherwise constrained to certain dialects (e.g. trailing /// commas) and/or format of parse (e.g. unescaping). options: ParserOptions, - /// Ensures the stack does not overflow by limiting recursion depth. + /// Ensure the stack does not overflow by limiting recursion depth. 
recursion_counter: RecursionCounter, } @@ -378,9 +325,6 @@ impl<'a> Parser<'a> { /// # Ok(()) /// # } /// ``` - /// - /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection - // for some of its recursive methods. See [`recursive::recursive`] for more information. pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self { self.recursion_counter = RecursionCounter::new(recursion_limit); self @@ -414,7 +358,7 @@ impl<'a> Parser<'a> { } /// Reset this parser to parse the specified token stream - pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { + pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { self.tokens = tokens; self.index = 0; self @@ -423,11 +367,11 @@ impl<'a> Parser<'a> { /// Reset this parser state to parse the specified tokens pub fn with_tokens(self, tokens: Vec) -> Self { // Put in dummy locations - let tokens_with_locations: Vec = tokens + let tokens_with_locations: Vec = tokens .into_iter() - .map(|token| TokenWithSpan { + .map(|token| TokenWithLocation { token, - span: Span::empty(), + location: Location { line: 0, column: 0 }, }) .collect(); self.with_tokens_with_locations(tokens_with_locations) @@ -440,7 +384,7 @@ impl<'a> Parser<'a> { /// /// See example on [`Parser::new()`] for an example pub fn try_with_sql(self, sql: &str) -> Result { - debug!("Parsing sql '{sql}'..."); + debug!("Parsing sql '{}'...", sql); let tokens = Tokenizer::new(self.dialect, sql) .with_unescape(self.options.unescape) .tokenize_with_location()?; @@ -471,10 +415,6 @@ impl<'a> Parser<'a> { expecting_statement_delimiter = false; } - if !self.options.require_semicolon_stmt_delimiter { - expecting_statement_delimiter = false; - } - match self.peek_token().token { Token::EOF => break, @@ -536,23 +476,7 @@ impl<'a> Parser<'a> { Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe), Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain), Keyword::ANALYZE => self.parse_analyze(), - Keyword::CASE => { - self.prev_token(); - self.parse_case_stmt() - } - Keyword::IF => { - self.prev_token(); - self.parse_if_stmt() - } - Keyword::WHILE => { - self.prev_token(); - self.parse_while() - } - Keyword::RAISE => { - self.prev_token(); - self.parse_raise_stmt() - } - Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => { + Keyword::SELECT | Keyword::WITH | Keyword::VALUES => { self.prev_token(); self.parse_query().map(Statement::Query) } @@ -582,57 +506,51 @@ impl<'a> Parser<'a> { Keyword::ALTER => self.parse_alter(), Keyword::CALL => self.parse_call(), Keyword::COPY => self.parse_copy(), - Keyword::OPEN => { - self.prev_token(); - self.parse_open() - } Keyword::CLOSE => self.parse_close(), Keyword::SET => self.parse_set(), Keyword::SHOW => self.parse_show(), Keyword::USE => self.parse_use(), Keyword::GRANT => self.parse_grant(), - Keyword::DENY => { - self.prev_token(); - self.parse_deny() - } Keyword::REVOKE => self.parse_revoke(), Keyword::START => self.parse_start_transaction(), + // `BEGIN` is a nonstandard but common alias for the + // standard `START TRANSACTION` statement. It is supported + // by at least PostgreSQL and MySQL. Keyword::BEGIN => self.parse_begin(), + // `END` is a nonstandard but common alias for the + // standard `COMMIT TRANSACTION` statement. It is supported + // by PostgreSQL. 
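The statement loop above expects a semicolon between statements before it dispatches on the leading keyword; a minimal sketch with illustrative statements:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Two statements separated by the expected delimiter.
    let stmts = Parser::new(&GenericDialect {})
        .try_with_sql("SELECT 1; SELECT 2")?
        .parse_statements()?;
    assert_eq!(stmts.len(), 2);
    Ok(())
}
```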
Keyword::END => self.parse_end(), Keyword::SAVEPOINT => self.parse_savepoint(), Keyword::RELEASE => self.parse_release(), Keyword::COMMIT => self.parse_commit(), - Keyword::RAISERROR => Ok(self.parse_raiserror()?), Keyword::ROLLBACK => self.parse_rollback(), Keyword::ASSERT => self.parse_assert(), // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific // syntaxes. They are used for Postgres prepared statement. Keyword::DEALLOCATE => self.parse_deallocate(), - Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(), + Keyword::EXECUTE => self.parse_execute(), Keyword::PREPARE => self.parse_prepare(), Keyword::MERGE => self.parse_merge(), - // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific + // `LISTEN` and `NOTIFY` are Postgres-specific // syntaxes. They are used for Postgres statement. - Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(), - Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(), - Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(), + Keyword::LISTEN if self.dialect.supports_listen() => self.parse_listen(), + Keyword::NOTIFY if self.dialect.supports_notify() => self.parse_notify(), // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html Keyword::PRAGMA => self.parse_pragma(), Keyword::UNLOAD => self.parse_unload(), - Keyword::RENAME => self.parse_rename(), // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => { self.parse_install() } - Keyword::LOAD => self.parse_load(), + // `LOAD` is duckdb specific https://duckdb.org/docs/extensions/overview + Keyword::LOAD if dialect_of!(self is DuckDbDialect | GenericDialect) => { + self.parse_load() + } // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/ Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { self.parse_optimize_table() } - // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment - Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(), - Keyword::PRINT => self.parse_print(), - Keyword::RETURN => self.parse_return(), _ => self.expected("an SQL statement", next_token), }, Token::LParen => { @@ -643,215 +561,6 @@ impl<'a> Parser<'a> { } } - /// Parse a `CASE` statement. - /// - /// See [Statement::Case] - pub fn parse_case_stmt(&mut self) -> Result { - let case_token = self.expect_keyword(Keyword::CASE)?; - - let match_expr = if self.peek_keyword(Keyword::WHEN) { - None - } else { - Some(self.parse_expr()?) - }; - - self.expect_keyword_is(Keyword::WHEN)?; - let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| { - parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END]) - })?; - - let else_block = if self.parse_keyword(Keyword::ELSE) { - Some(self.parse_conditional_statement_block(&[Keyword::END])?) - } else { - None - }; - - let mut end_case_token = self.expect_keyword(Keyword::END)?; - if self.peek_keyword(Keyword::CASE) { - end_case_token = self.expect_keyword(Keyword::CASE)?; - } - - Ok(Statement::Case(CaseStatement { - case_token: AttachedToken(case_token), - match_expr, - when_blocks, - else_block, - end_case_token: AttachedToken(end_case_token), - })) - } - - /// Parse an `IF` statement. 
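Several of the keywords above are only dispatched for particular dialects; a sketch using DuckDB's `INSTALL`/`LOAD`, with an illustrative extension name:

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // INSTALL and LOAD are only recognized for the DuckDB and generic dialects.
    let stmts = Parser::parse_sql(&DuckDbDialect {}, "INSTALL httpfs; LOAD httpfs")?;
    println!("{stmts:?}");
    Ok(())
}
```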
- /// - /// See [Statement::If] - pub fn parse_if_stmt(&mut self) -> Result { - self.expect_keyword_is(Keyword::IF)?; - let if_block = self.parse_conditional_statement_block(&[ - Keyword::ELSE, - Keyword::ELSEIF, - Keyword::END, - ])?; - - let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) { - self.parse_keyword_separated(Keyword::ELSEIF, |parser| { - parser.parse_conditional_statement_block(&[ - Keyword::ELSEIF, - Keyword::ELSE, - Keyword::END, - ]) - })? - } else { - vec![] - }; - - let else_block = if self.parse_keyword(Keyword::ELSE) { - Some(self.parse_conditional_statement_block(&[Keyword::END])?) - } else { - None - }; - - self.expect_keyword_is(Keyword::END)?; - let end_token = self.expect_keyword(Keyword::IF)?; - - Ok(Statement::If(IfStatement { - if_block, - elseif_blocks, - else_block, - end_token: Some(AttachedToken(end_token)), - })) - } - - /// Parse a `WHILE` statement. - /// - /// See [Statement::While] - fn parse_while(&mut self) -> Result { - self.expect_keyword_is(Keyword::WHILE)?; - let while_block = self.parse_conditional_statement_block(&[Keyword::END])?; - - Ok(Statement::While(WhileStatement { while_block })) - } - - /// Parses an expression and associated list of statements - /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`. - /// - /// Example: - /// ```sql - /// IF condition THEN statement1; statement2; - /// ``` - fn parse_conditional_statement_block( - &mut self, - terminal_keywords: &[Keyword], - ) -> Result { - let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?; - let mut then_token = None; - - let condition = match &start_token.token { - Token::Word(w) if w.keyword == Keyword::ELSE => None, - Token::Word(w) if w.keyword == Keyword::WHILE => { - let expr = self.parse_expr()?; - Some(expr) - } - _ => { - let expr = self.parse_expr()?; - then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?)); - Some(expr) - } - }; - - let conditional_statements = self.parse_conditional_statements(terminal_keywords)?; - - Ok(ConditionalStatementBlock { - start_token: AttachedToken(start_token), - condition, - then_token, - conditional_statements, - }) - } - - /// Parse a BEGIN/END block or a sequence of statements - /// This could be inside of a conditional (IF, CASE, WHILE etc.) or an object body defined optionally BEGIN/END and one or more statements. - pub(crate) fn parse_conditional_statements( - &mut self, - terminal_keywords: &[Keyword], - ) -> Result { - let conditional_statements = if self.peek_keyword(Keyword::BEGIN) { - let begin_token = self.expect_keyword(Keyword::BEGIN)?; - let statements = self.parse_statement_list(terminal_keywords)?; - let end_token = self.expect_keyword(Keyword::END)?; - - ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), - statements, - end_token: AttachedToken(end_token), - }) - } else { - ConditionalStatements::Sequence { - statements: self.parse_statement_list(terminal_keywords)?, - } - }; - Ok(conditional_statements) - } - - /// Parse a `RAISE` statement. - /// - /// See [Statement::Raise] - pub fn parse_raise_stmt(&mut self) -> Result { - self.expect_keyword_is(Keyword::RAISE)?; - - let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) { - self.expect_token(&Token::Eq)?; - Some(RaiseStatementValue::UsingMessage(self.parse_expr()?)) - } else { - self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))? 
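The conditional-statement helpers above are assembled from keyword and expression primitives that are also public on `Parser`; a rough sketch of those building blocks, parsing the pieces of an illustrative `IF ... THEN ...` fragment rather than a complete statement:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::keywords::Keyword;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let mut parser = Parser::new(&GenericDialect {}).try_with_sql("IF a > 1 THEN SELECT 1")?;
    // parse_keyword consumes a keyword only if it is next in the stream.
    assert!(parser.parse_keyword(Keyword::IF));
    // The condition is an ordinary expression...
    let condition = parser.parse_expr()?;
    // ...followed by a mandatory THEN and the statement body.
    parser.expect_keyword(Keyword::THEN)?;
    let body = parser.parse_statement()?;
    println!("{condition:?} => {body:?}");
    Ok(())
}
```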
- }; - - Ok(Statement::Raise(RaiseStatement { value })) - } - - pub fn parse_comment(&mut self) -> Result { - let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - - self.expect_keyword_is(Keyword::ON)?; - let token = self.next_token(); - - let (object_type, object_name) = match token.token { - Token::Word(w) if w.keyword == Keyword::COLUMN => { - (CommentObject::Column, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::TABLE => { - (CommentObject::Table, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::EXTENSION => { - (CommentObject::Extension, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::SCHEMA => { - (CommentObject::Schema, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::DATABASE => { - (CommentObject::Database, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::USER => { - (CommentObject::User, self.parse_object_name(false)?) - } - Token::Word(w) if w.keyword == Keyword::ROLE => { - (CommentObject::Role, self.parse_object_name(false)?) - } - _ => self.expected("comment object_type", token)?, - }; - - self.expect_keyword_is(Keyword::IS)?; - let comment = if self.parse_keyword(Keyword::NULL) { - None - } else { - Some(self.parse_literal_string()?) - }; - Ok(Statement::Comment { - object_type, - object_name, - comment, - if_exists, - }) - } - pub fn parse_flush(&mut self) -> Result { let mut channel = None; let mut tables: Vec = vec![]; @@ -859,7 +568,7 @@ impl<'a> Parser<'a> { let mut export = false; if !dialect_of!(self is MySqlDialect | GenericDialect) { - return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start); + return parser_err!("Unsupported statement FLUSH", self.peek_token().location); } let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) { @@ -941,7 +650,7 @@ impl<'a> Parser<'a> { pub fn parse_msck(&mut self) -> Result { let repair = self.parse_keyword(Keyword::REPAIR); - self.expect_keyword_is(Keyword::TABLE)?; + self.expect_keyword(Keyword::TABLE)?; let table_name = self.parse_object_name(false)?; let partition_action = self .maybe_parse(|parser| { @@ -955,7 +664,7 @@ impl<'a> Parser<'a> { Some(Keyword::SYNC) => Some(AddDropSync::SYNC), _ => None, }; - parser.expect_keyword_is(Keyword::PARTITIONS)?; + parser.expect_keyword(Keyword::PARTITIONS)?; Ok(pa) })? .unwrap_or_default(); @@ -968,13 +677,12 @@ impl<'a> Parser<'a> { pub fn parse_truncate(&mut self) -> Result { let table = self.parse_keyword(Keyword::TABLE); + let only = self.parse_keyword(Keyword::ONLY); let table_names = self - .parse_comma_separated(|p| { - Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?)) - })? + .parse_comma_separated(|p| p.parse_object_name(false))? 
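A sketch of the TRUNCATE forms handled by `parse_truncate` above; the table names are illustrative:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Multiple targets plus identity and cascade options.
    let stmts = Parser::parse_sql(
        &PostgreSqlDialect {},
        "TRUNCATE TABLE orders, order_items RESTART IDENTITY CASCADE",
    )?;
    println!("{stmts:?}");
    Ok(())
}
```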
.into_iter() - .map(|(only, name)| TruncateTableTarget { name, only }) + .map(|n| TruncateTableTarget { name: n }) .collect(); let mut partitions = None; @@ -996,7 +704,13 @@ impl<'a> Parser<'a> { None }; - cascade = self.parse_cascade_option(); + cascade = if self.parse_keyword(Keyword::CASCADE) { + Some(TruncateCascadeOption::Cascade) + } else if self.parse_keyword(Keyword::RESTRICT) { + Some(TruncateCascadeOption::Restrict) + } else { + None + }; }; let on_cluster = self.parse_optional_on_cluster()?; @@ -1005,22 +719,13 @@ impl<'a> Parser<'a> { table_names, partitions, table, + only, identity, cascade, on_cluster, }) } - fn parse_cascade_option(&mut self) -> Option { - if self.parse_keyword(Keyword::CASCADE) { - Some(CascadeOption::Cascade) - } else if self.parse_keyword(Keyword::RESTRICT) { - Some(CascadeOption::Restrict) - } else { - None - } - } - pub fn parse_attach_duckdb_database_options( &mut self, ) -> Result, ParserError> { @@ -1040,7 +745,7 @@ impl<'a> Parser<'a> { }; options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean)); } else if self.parse_keyword(Keyword::TYPE) { - let ident = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; options.push(AttachDuckDBDatabaseOption::Type(ident)); } else { return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token()); @@ -1059,9 +764,9 @@ impl<'a> Parser<'a> { pub fn parse_attach_duckdb_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let database_path = self.parse_identifier()?; + let database_path = self.parse_identifier(false)?; let database_alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; @@ -1079,7 +784,7 @@ impl<'a> Parser<'a> { pub fn parse_detach_duckdb_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let database_alias = self.parse_identifier()?; + let database_alias = self.parse_identifier(false)?; Ok(Statement::DetachDuckDBDatabase { if_exists, database, @@ -1090,8 +795,8 @@ impl<'a> Parser<'a> { pub fn parse_attach_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let database_file_name = self.parse_expr()?; - self.expect_keyword_is(Keyword::AS)?; - let schema_name = self.parse_identifier()?; + self.expect_keyword(Keyword::AS)?; + let schema_name = self.parse_identifier(false)?; Ok(Statement::AttachDatabase { database, schema_name, @@ -1100,7 +805,7 @@ impl<'a> Parser<'a> { } pub fn parse_analyze(&mut self) -> Result { - let has_table_keyword = self.parse_keyword(Keyword::TABLE); + self.expect_keyword(Keyword::TABLE)?; let table_name = self.parse_object_name(false)?; let mut for_columns = false; let mut cache_metadata = false; @@ -1123,21 +828,21 @@ impl<'a> Parser<'a> { } Some(Keyword::NOSCAN) => noscan = true, Some(Keyword::FOR) => { - self.expect_keyword_is(Keyword::COLUMNS)?; + self.expect_keyword(Keyword::COLUMNS)?; columns = self .maybe_parse(|parser| { - parser.parse_comma_separated(|p| p.parse_identifier()) + parser.parse_comma_separated(|p| p.parse_identifier(false)) })? 
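A sketch of the SQLite-style ATTACH handled by `parse_attach_database` above; the file and schema names are illustrative:

```rust
use sqlparser::dialect::SQLiteDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // ATTACH DATABASE <expr> AS <ident>
    let stmts = Parser::parse_sql(&SQLiteDialect {}, "ATTACH DATABASE 'other.db' AS other")?;
    println!("{stmts:?}");
    Ok(())
}
```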
.unwrap_or_default(); for_columns = true } Some(Keyword::CACHE) => { - self.expect_keyword_is(Keyword::METADATA)?; + self.expect_keyword(Keyword::METADATA)?; cache_metadata = true } Some(Keyword::COMPUTE) => { - self.expect_keyword_is(Keyword::STATISTICS)?; + self.expect_keyword(Keyword::STATISTICS)?; compute_statistics = true } _ => break, @@ -1145,7 +850,6 @@ impl<'a> Parser<'a> { } Ok(Statement::Analyze { - has_table_keyword, table_name, for_columns, columns, @@ -1165,7 +869,7 @@ impl<'a> Parser<'a> { t @ (Token::Word(_) | Token::SingleQuotedString(_)) => { if self.peek_token().token == Token::Period { let mut id_parts: Vec = vec![match t { - Token::Word(w) => w.into_ident(next_token.span), + Token::Word(w) => w.to_ident(), Token::SingleQuotedString(s) => Ident::with_quote('\'', s), _ => unreachable!(), // We matched above }]; @@ -1173,16 +877,13 @@ impl<'a> Parser<'a> { while self.consume_token(&Token::Period) { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => id_parts.push(w.into_ident(next_token.span)), + Token::Word(w) => id_parts.push(w.to_ident()), Token::SingleQuotedString(s) => { // SQLite has single-quoted identifiers id_parts.push(Ident::with_quote('\'', s)) } Token::Mul => { - return Ok(Expr::QualifiedWildcard( - ObjectName::from(id_parts), - AttachedToken(next_token), - )); + return Ok(Expr::QualifiedWildcard(ObjectName(id_parts))); } _ => { return self @@ -1193,7 +894,7 @@ impl<'a> Parser<'a> { } } Token::Mul => { - return Ok(Expr::Wildcard(AttachedToken(next_token))); + return Ok(Expr::Wildcard); } _ => (), }; @@ -1207,48 +908,20 @@ impl<'a> Parser<'a> { self.parse_subexpr(self.dialect.prec_unknown()) } - pub fn parse_expr_with_alias_and_order_by( - &mut self, - ) -> Result { - let expr = self.parse_expr()?; - - fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { - explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw) - } - let alias = self.parse_optional_alias_inner(None, validator)?; - let order_by = OrderByOptions { - asc: self.parse_asc_desc(), - nulls_first: None, - }; - Ok(ExprWithAliasAndOrderBy { - expr: ExprWithAlias { expr, alias }, - order_by, - }) - } - /// Parse tokens until the precedence changes. pub fn parse_subexpr(&mut self, precedence: u8) -> Result { let _guard = self.recursion_counter.try_decrease()?; debug!("parsing expr"); let mut expr = self.parse_prefix()?; - - expr = self.parse_compound_expr(expr, vec![])?; - - debug!("prefix: {expr:?}"); + debug!("prefix: {:?}", expr); loop { let next_precedence = self.get_next_precedence()?; - debug!("next precedence: {next_precedence:?}"); + debug!("next precedence: {:?}", next_precedence); if precedence >= next_precedence { break; } - // The period operator is handled exclusively by the - // compound field access parsing. 
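`parse_subexpr` keeps consuming infix operators while the next operator binds tighter than the current precedence, so multiplication groups under addition; a small sketch:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let mut parser = Parser::new(&GenericDialect {}).try_with_sql("1 + 2 * 3")?;
    // The resulting tree is 1 + (2 * 3): the outer node is the `+`,
    // with the `*` nested on its right-hand side.
    let expr = parser.parse_expr()?;
    println!("{expr:#?}");
    Ok(())
}
```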
- if Token::Period == self.peek_token_ref().token { - break; - } - expr = self.parse_infix(expr, next_precedence)?; } Ok(expr) @@ -1266,39 +939,24 @@ impl<'a> Parser<'a> { } pub fn parse_savepoint(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; Ok(Statement::Savepoint { name }) } pub fn parse_release(&mut self) -> Result { let _ = self.parse_keyword(Keyword::SAVEPOINT); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; Ok(Statement::ReleaseSavepoint { name }) } pub fn parse_listen(&mut self) -> Result { - let channel = self.parse_identifier()?; + let channel = self.parse_identifier(false)?; Ok(Statement::LISTEN { channel }) } - pub fn parse_unlisten(&mut self) -> Result { - let channel = if self.consume_token(&Token::Mul) { - Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string()) - } else { - match self.parse_identifier() { - Ok(expr) => expr, - _ => { - self.prev_token(); - return self.expected("wildcard or identifier", self.peek_token()); - } - } - }; - Ok(Statement::UNLISTEN { channel }) - } - pub fn parse_notify(&mut self) -> Result { - let channel = self.parse_identifier()?; + let channel = self.parse_identifier(false)?; let payload = if self.consume_token(&Token::Comma) { Some(self.parse_literal_string()?) } else { @@ -1307,191 +965,6 @@ impl<'a> Parser<'a> { Ok(Statement::NOTIFY { channel, payload }) } - /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable] - pub fn parse_rename(&mut self) -> Result { - if self.peek_keyword(Keyword::TABLE) { - self.expect_keyword(Keyword::TABLE)?; - let rename_tables = self.parse_comma_separated(|parser| { - let old_name = parser.parse_object_name(false)?; - parser.expect_keyword(Keyword::TO)?; - let new_name = parser.parse_object_name(false)?; - - Ok(RenameTable { old_name, new_name }) - })?; - Ok(Statement::RenameTable(rename_tables)) - } else { - self.expected("KEYWORD `TABLE` after RENAME", self.peek_token()) - } - } - - /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect. - /// Returns `None if no match is found. 
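LISTEN and NOTIFY are only dispatched when the dialect opts in; a sketch using PostgreSQL, with an illustrative channel name and payload:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let sql = "LISTEN my_channel; NOTIFY my_channel, 'payload'";
    let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{stmts:?}");
    Ok(())
}
```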
- fn parse_expr_prefix_by_reserved_word( - &mut self, - w: &Word, - w_span: Span, - ) -> Result, ParserError> { - match w.keyword { - Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => { - self.prev_token(); - Ok(Some(Expr::Value(self.parse_value()?))) - } - Keyword::NULL => { - self.prev_token(); - Ok(Some(Expr::Value(self.parse_value()?))) - } - Keyword::CURRENT_CATALOG - | Keyword::CURRENT_USER - | Keyword::SESSION_USER - | Keyword::USER - if dialect_of!(self is PostgreSqlDialect | GenericDialect) => - { - Ok(Some(Expr::Function(Function { - name: ObjectName::from(vec![w.clone().into_ident(w_span)]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::None, - null_treatment: None, - filter: None, - over: None, - within_group: vec![], - }))) - } - Keyword::CURRENT_TIMESTAMP - | Keyword::CURRENT_TIME - | Keyword::CURRENT_DATE - | Keyword::LOCALTIME - | Keyword::LOCALTIMESTAMP => { - Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?)) - } - Keyword::CASE => Ok(Some(self.parse_case_expr()?)), - Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)), - Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)), - Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)), - Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)), - Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)), - Keyword::EXISTS - // Support parsing Databricks has a function named `exists`. - if !dialect_of!(self is DatabricksDialect) - || matches!( - self.peek_nth_token_ref(1).token, - Token::Word(Word { - keyword: Keyword::SELECT | Keyword::WITH, - .. - }) - ) => - { - Ok(Some(self.parse_exists_expr(false)?)) - } - Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)), - Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)), - Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)), - Keyword::POSITION if self.peek_token_ref().token == Token::LParen => { - Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?)) - } - Keyword::SUBSTR | Keyword::SUBSTRING => { - self.prev_token(); - Ok(Some(self.parse_substring()?)) - } - Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)), - Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)), - Keyword::INTERVAL => Ok(Some(self.parse_interval()?)), - // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call - Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => { - self.expect_token(&Token::LBracket)?; - Ok(Some(self.parse_array_expr(true)?)) - } - Keyword::ARRAY - if self.peek_token() == Token::LParen - && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => - { - self.expect_token(&Token::LParen)?; - let query = self.parse_query()?; - self.expect_token(&Token::RParen)?; - Ok(Some(Expr::Function(Function { - name: ObjectName::from(vec![w.clone().into_ident(w_span)]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::Subquery(query), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - }))) - } - Keyword::NOT => Ok(Some(self.parse_not()?)), - Keyword::MATCH if self.dialect.supports_match_against() => { - Ok(Some(self.parse_match_against()?)) - } - Keyword::STRUCT if self.dialect.supports_struct_literal() => { - let struct_expr = self.parse_struct_literal()?; - Ok(Some(struct_expr)) - } - Keyword::PRIOR if matches!(self.state, 
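Keywords such as CAST, EXTRACT and TRIM act as expression prefixes: the word decides which specialized helper above is invoked. A sketch with CAST; the column and target type are illustrative:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let mut parser =
        Parser::new(&GenericDialect {}).try_with_sql("CAST(price AS DECIMAL(10, 2))")?;
    let expr = parser.parse_expr()?;
    println!("{expr:?}");
    Ok(())
}
```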
ParserState::ConnectBy) => { - let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; - Ok(Some(Expr::Prior(Box::new(expr)))) - } - Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => { - Ok(Some(self.parse_duckdb_map_literal()?)) - } - _ if self.dialect.supports_geometric_types() => match w.keyword { - Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)), - Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)), - Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)), - Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)), - Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)), - Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)), - Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)), - _ => Ok(None), - }, - _ => Ok(None), - } - } - - /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect. - fn parse_expr_prefix_by_unreserved_word( - &mut self, - w: &Word, - w_span: Span, - ) -> Result { - match self.peek_token().token { - Token::LParen if !self.peek_outer_join_operator() => { - let id_parts = vec![w.clone().into_ident(w_span)]; - self.parse_function(ObjectName::from(id_parts)) - } - // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html - Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::HexStringLiteral(_) - if w.value.starts_with('_') => - { - Ok(Expr::Prefixed { - prefix: w.clone().into_ident(w_span), - value: self.parse_introduced_string_expr()?.into(), - }) - } - // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html - Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::HexStringLiteral(_) - if w.value.starts_with('_') => - { - Ok(Expr::Prefixed { - prefix: w.clone().into_ident(w_span), - value: self.parse_introduced_string_expr()?.into(), - }) - } - Token::Arrow if self.dialect.supports_lambda_functions() => { - self.expect_token(&Token::Arrow)?; - Ok(Expr::Lambda(LambdaFunction { - params: OneOrManyWithParens::One(w.clone().into_ident(w_span)), - body: Box::new(self.parse_expr()?), - })) - } - _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))), - } - } - /// Parse an expression prefix. pub fn parse_prefix(&mut self) -> Result { // allow the dialect to override prefix parsing @@ -1515,7 +988,7 @@ impl<'a> Parser<'a> { // Note also that naively `SELECT date` looks like a syntax error because the `date` type // name is not followed by a string literal, but in fact in PostgreSQL it is a valid // expression that should parse as the column name "date". - let loc = self.peek_token_ref().span.start; + let loc = self.peek_token().location; let opt_expr = self.maybe_parse(|parser| { match parser.parse_data_type()? { DataType::Interval => parser.parse_interval(), @@ -1529,7 +1002,7 @@ impl<'a> Parser<'a> { DataType::Custom(..) => parser_err!("dummy", loc), data_type => Ok(Expr::TypedString { data_type, - value: parser.parse_value()?, + value: parser.parse_literal_string()?, }), } })?; @@ -1538,55 +1011,178 @@ impl<'a> Parser<'a> { return Ok(expr); } - // Cache some dialect properties to avoid lifetime issues with the - // next_token reference. 
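As the comment above notes, a data type name followed by a string literal parses as a typed string, while a bare `date` still falls back to an identifier; a sketch:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // DATE '2024-01-01' becomes a typed-string expression rather than an identifier.
    let mut parser = Parser::new(&GenericDialect {}).try_with_sql("DATE '2024-01-01'")?;
    let expr = parser.parse_expr()?;
    println!("{expr:?}");
    Ok(())
}
```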
- - let dialect = self.dialect; - - self.advance_token(); - let next_token_index = self.get_current_index(); - let next_token = self.get_current_token(); - let span = next_token.span; - let expr = match &next_token.token { - Token::Word(w) => { - // The word we consumed may fall into one of two cases: it has a special meaning, or not. - // For example, in Snowflake, the word `interval` may have two meanings depending on the context: - // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;` - // ^^^^^^^^^^^^^^^^ ^^^^^^^^ - // interval expression identifier - // - // We first try to parse the word and following tokens as a special expression, and if that fails, - // we rollback and try to parse it as an identifier. - let w = w.clone(); - match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) { - // This word indicated an expression prefix and parsing was successful - Ok(Some(expr)) => Ok(expr), - - // No expression prefix associated with this word - Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?), - - // If parsing of the word as a special expression failed, we are facing two options: - // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`) - // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl` - // We first try to parse the word as an identifier and if that fails - // we rollback and return the parsing error we got from trying to parse a - // special expression (to maintain backwards compatibility of parsing errors). - Err(e) => { - if !self.dialect.is_reserved_for_identifier(w.keyword) { - if let Ok(Some(expr)) = self.maybe_parse(|parser| { - parser.parse_expr_prefix_by_unreserved_word(&w, span) - }) { - return Ok(expr); + let next_token = self.next_token(); + let expr = match next_token.token { + Token::Word(w) => match w.keyword { + Keyword::TRUE | Keyword::FALSE | Keyword::NULL => { + self.prev_token(); + Ok(Expr::Value(self.parse_value()?)) + } + Keyword::CURRENT_CATALOG + | Keyword::CURRENT_USER + | Keyword::SESSION_USER + | Keyword::USER + if dialect_of!(self is PostgreSqlDialect | GenericDialect) => + { + Ok(Expr::Function(Function { + name: ObjectName(vec![w.to_ident()]), + parameters: FunctionArguments::None, + args: FunctionArguments::None, + null_treatment: None, + filter: None, + over: None, + within_group: vec![], + })) + } + Keyword::CURRENT_TIMESTAMP + | Keyword::CURRENT_TIME + | Keyword::CURRENT_DATE + | Keyword::LOCALTIME + | Keyword::LOCALTIMESTAMP => { + self.parse_time_functions(ObjectName(vec![w.to_ident()])) + } + Keyword::CASE => self.parse_case_expr(), + Keyword::CONVERT => self.parse_convert_expr(false), + Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => self.parse_convert_expr(true), + Keyword::CAST => self.parse_cast_expr(CastKind::Cast), + Keyword::TRY_CAST => self.parse_cast_expr(CastKind::TryCast), + Keyword::SAFE_CAST => self.parse_cast_expr(CastKind::SafeCast), + Keyword::EXISTS + // Support parsing Databricks has a function named `exists`. + if !dialect_of!(self is DatabricksDialect) + || matches!( + self.peek_nth_token(1).token, + Token::Word(Word { + keyword: Keyword::SELECT | Keyword::WITH, + .. 
+ }) + ) => + { + self.parse_exists_expr(false) + } + Keyword::EXTRACT => self.parse_extract_expr(), + Keyword::CEIL => self.parse_ceil_floor_expr(true), + Keyword::FLOOR => self.parse_ceil_floor_expr(false), + Keyword::POSITION if self.peek_token().token == Token::LParen => { + self.parse_position_expr(w.to_ident()) + } + Keyword::SUBSTRING => self.parse_substring_expr(), + Keyword::OVERLAY => self.parse_overlay_expr(), + Keyword::TRIM => self.parse_trim_expr(), + Keyword::INTERVAL => self.parse_interval(), + // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call + Keyword::ARRAY if self.peek_token() == Token::LBracket => { + self.expect_token(&Token::LBracket)?; + self.parse_array_expr(true) + } + Keyword::ARRAY + if self.peek_token() == Token::LParen + && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => + { + self.expect_token(&Token::LParen)?; + let query = self.parse_query()?; + self.expect_token(&Token::RParen)?; + Ok(Expr::Function(Function { + name: ObjectName(vec![w.to_ident()]), + parameters: FunctionArguments::None, + args: FunctionArguments::Subquery(query), + filter: None, + null_treatment: None, + over: None, + within_group: vec![], + })) + } + Keyword::NOT => self.parse_not(), + Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => { + self.parse_match_against() + } + Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => { + self.prev_token(); + self.parse_bigquery_struct_literal() + } + Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => { + let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; + Ok(Expr::Prior(Box::new(expr))) + } + Keyword::MAP if self.peek_token() == Token::LBrace && self.dialect.support_map_literal_syntax() => { + self.parse_duckdb_map_literal() + } + // Here `w` is a word, check if it's a part of a multipart + // identifier, a function call, or a simple identifier: + _ => match self.peek_token().token { + Token::LParen | Token::Period => { + let mut id_parts: Vec = vec![w.to_ident()]; + let mut ends_with_wildcard = false; + while self.consume_token(&Token::Period) { + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => id_parts.push(w.to_ident()), + Token::Mul => { + // Postgres explicitly allows funcnm(tablenm.*) and the + // function array_agg traverses this control flow + if dialect_of!(self is PostgreSqlDialect) { + ends_with_wildcard = true; + break; + } else { + return self + .expected("an identifier after '.'", next_token); + } + } + Token::SingleQuotedString(s) => { + id_parts.push(Ident::with_quote('\'', s)) + } + _ => { + return self + .expected("an identifier or a '*' after '.'", next_token); + } } } - return Err(e); + + if ends_with_wildcard { + Ok(Expr::QualifiedWildcard(ObjectName(id_parts))) + } else if self.consume_token(&Token::LParen) { + if dialect_of!(self is SnowflakeDialect | MsSqlDialect) + && self.consume_tokens(&[Token::Plus, Token::RParen]) + { + Ok(Expr::OuterJoin(Box::new( + match <[Ident; 1]>::try_from(id_parts) { + Ok([ident]) => Expr::Identifier(ident), + Err(parts) => Expr::CompoundIdentifier(parts), + }, + ))) + } else { + self.prev_token(); + self.parse_function(ObjectName(id_parts)) + } + } else { + Ok(Expr::CompoundIdentifier(id_parts)) + } } - } - } // End of Token::Word + // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html + Token::SingleQuotedString(_) + | Token::DoubleQuotedString(_) + | Token::HexStringLiteral(_) + if 
w.value.starts_with('_') => + { + Ok(Expr::IntroducedString { + introducer: w.value, + value: self.parse_introduced_string_value()?, + }) + } + Token::Arrow if self.dialect.supports_lambda_functions() => { + self.expect_token(&Token::Arrow)?; + return Ok(Expr::Lambda(LambdaFunction { + params: OneOrManyWithParens::One(w.to_ident()), + body: Box::new(self.parse_expr()?), + })); + } + _ => Ok(Expr::Identifier(w.to_ident())), + }, + }, // End of Token::Word // array `[1, 2, 3]` Token::LBracket => self.parse_array_expr(false), tok @ Token::Minus | tok @ Token::Plus => { - let op = if *tok == Token::Plus { + let op = if tok == Token::Plus { UnaryOperator::Plus } else { UnaryOperator::Minus @@ -1598,16 +1194,12 @@ impl<'a> Parser<'a> { ), }) } - Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp { - op: UnaryOperator::BangNot, - expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?), - }), tok @ Token::DoubleExclamationMark | tok @ Token::PGSquareRoot | tok @ Token::PGCubeRoot | tok @ Token::AtSign | tok @ Token::Tilde - if dialect_is!(dialect is PostgreSqlDialect) => + if dialect_of!(self is PostgreSqlDialect) => { let op = match tok { Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial, @@ -1624,33 +1216,7 @@ impl<'a> Parser<'a> { ), }) } - tok @ Token::Sharp - | tok @ Token::AtDashAt - | tok @ Token::AtAt - | tok @ Token::QuestionMarkDash - | tok @ Token::QuestionPipe - if self.dialect.supports_geometric_types() => - { - let op = match tok { - Token::Sharp => UnaryOperator::Hash, - Token::AtDashAt => UnaryOperator::AtDashAt, - Token::AtAt => UnaryOperator::DoubleAt, - Token::QuestionMarkDash => UnaryOperator::QuestionDash, - Token::QuestionPipe => UnaryOperator::QuestionPipe, - _ => { - return Err(ParserError::ParserError(format!( - "Unexpected token in unary operator parsing: {tok:?}" - ))) - } - }; - Ok(Expr::UnaryOp { - op, - expr: Box::new( - self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?, - ), - }) - } - Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => + Token::EscapedStringLiteral(_) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) @@ -1692,17 +1258,34 @@ impl<'a> Parser<'a> { } }; self.expect_token(&Token::RParen)?; - Ok(expr) + if !self.consume_token(&Token::Period) { + Ok(expr) + } else { + let tok = self.next_token(); + let key = match tok.token { + Token::Word(word) => word.to_ident(), + _ => { + return parser_err!( + format!("Expected identifier, found: {tok}"), + tok.location + ) + } + }; + Ok(Expr::CompositeAccess { + expr: Box::new(expr), + key, + }) + } } Token::Placeholder(_) | Token::Colon | Token::AtSign => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::LBrace => { + Token::LBrace if self.dialect.supports_dictionary_syntax() => { self.prev_token(); - self.parse_lbrace_expr() + self.parse_duckdb_struct_literal() } - _ => self.expected_at("an expression", next_token_index), + _ => self.expected("an expression", next_token), }?; if self.parse_keyword(Keyword::COLLATE) { @@ -1715,260 +1298,6 @@ impl<'a> Parser<'a> { } } - fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result { - Ok(Expr::TypedString { - data_type: DataType::GeometricType(kind), - value: self.parse_value()?, - }) - } - - /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`. 
- /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead. - /// If only the root exists, return the root. - /// Parses compound expressions which may be delimited by period - /// or bracket notation. - /// For example: `a.b.c`, `a.b[1]`. - pub fn parse_compound_expr( - &mut self, - root: Expr, - mut chain: Vec, - ) -> Result { - let mut ending_wildcard: Option = None; - loop { - if self.consume_token(&Token::Period) { - let next_token = self.peek_token_ref(); - match &next_token.token { - Token::Mul => { - // Postgres explicitly allows funcnm(tablenm.*) and the - // function array_agg traverses this control flow - if dialect_of!(self is PostgreSqlDialect) { - ending_wildcard = Some(self.next_token()); - } else { - // Put back the consumed `.` tokens before exiting. - // If this expression is being parsed in the - // context of a projection, then the `.*` could imply - // a wildcard expansion. For example: - // `SELECT STRUCT('foo').* FROM T` - self.prev_token(); // . - } - - break; - } - Token::SingleQuotedString(s) => { - let expr = - Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s)); - chain.push(AccessExpr::Dot(expr)); - self.advance_token(); // The consumed string - } - // Fallback to parsing an arbitrary expression. - _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? { - // If we get back a compound field access or identifier, - // we flatten the nested expression. - // For example if the current root is `foo` - // and we get back a compound identifier expression `bar.baz` - // The full expression should be `foo.bar.baz` (i.e. - // a root with an access chain with 2 entries) and not - // `foo.(bar.baz)` (i.e. a root with an access chain with - // 1 entry`). - Expr::CompoundFieldAccess { root, access_chain } => { - chain.push(AccessExpr::Dot(*root)); - chain.extend(access_chain); - } - Expr::CompoundIdentifier(parts) => chain - .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)), - expr => { - chain.push(AccessExpr::Dot(expr)); - } - }, - } - } else if !self.dialect.supports_partiql() - && self.peek_token_ref().token == Token::LBracket - { - self.parse_multi_dim_subscript(&mut chain)?; - } else { - break; - } - } - - let tok_index = self.get_current_index(); - if let Some(wildcard_token) = ending_wildcard { - if !Self::is_all_ident(&root, &chain) { - return self.expected("an identifier or a '*' after '.'", self.peek_token()); - }; - Ok(Expr::QualifiedWildcard( - ObjectName::from(Self::exprs_to_idents(root, chain)?), - AttachedToken(wildcard_token), - )) - } else if self.maybe_parse_outer_join_operator() { - if !Self::is_all_ident(&root, &chain) { - return self.expected_at("column identifier before (+)", tok_index); - }; - let expr = if chain.is_empty() { - root - } else { - Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?) - }; - Ok(Expr::OuterJoin(expr.into())) - } else { - Self::build_compound_expr(root, chain) - } - } - - /// Combines a root expression and access chain to form - /// a compound expression. Which may be a [Expr::CompoundFieldAccess] - /// or other special cased expressions like [Expr::CompoundIdentifier], - /// [Expr::OuterJoin]. - fn build_compound_expr( - root: Expr, - mut access_chain: Vec, - ) -> Result { - if access_chain.is_empty() { - return Ok(root); - } - - if Self::is_all_ident(&root, &access_chain) { - return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents( - root, - access_chain, - )?)); - } - - // Flatten qualified function calls. 
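The compound-expression handling above collapses chains of identifiers such as `a.b.c` into a single node; a sketch with illustrative names (newer releases may represent mixed chains like `a.b[1].c` as a compound field access instead):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let mut parser = Parser::new(&GenericDialect {}).try_with_sql("db1.table1.col1")?;
    // Plain dotted chains of identifiers come back as a compound identifier.
    let expr = parser.parse_expr()?;
    println!("{expr:?}");
    Ok(())
}
```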
- // For example, the expression `a.b.c.foo(1,2,3)` should - // represent a function called `a.b.c.foo`, rather than - // a composite expression. - if matches!(root, Expr::Identifier(_)) - && matches!( - access_chain.last(), - Some(AccessExpr::Dot(Expr::Function(_))) - ) - && access_chain - .iter() - .rev() - .skip(1) // All except the Function - .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_)))) - { - let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else { - return parser_err!("expected function expression", root.span().start); - }; - - let compound_func_name = [root] - .into_iter() - .chain(access_chain.into_iter().flat_map(|access| match access { - AccessExpr::Dot(expr) => Some(expr), - _ => None, - })) - .flat_map(|expr| match expr { - Expr::Identifier(ident) => Some(ident), - _ => None, - }) - .map(ObjectNamePart::Identifier) - .chain(func.name.0) - .collect::>(); - func.name = ObjectName(compound_func_name); - - return Ok(Expr::Function(func)); - } - - // Flatten qualified outer join expressions. - // For example, the expression `T.foo(+)` should - // represent an outer join on the column name `T.foo` - // rather than a composite expression. - if access_chain.len() == 1 - && matches!( - access_chain.last(), - Some(AccessExpr::Dot(Expr::OuterJoin(_))) - ) - { - let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else { - return parser_err!("expected (+) expression", root.span().start); - }; - - if !Self::is_all_ident(&root, &[]) { - return parser_err!("column identifier before (+)", root.span().start); - }; - - let token_start = root.span().start; - let mut idents = Self::exprs_to_idents(root, vec![])?; - match *inner_expr { - Expr::CompoundIdentifier(suffix) => idents.extend(suffix), - Expr::Identifier(suffix) => idents.push(suffix), - _ => { - return parser_err!("column identifier before (+)", token_start); - } - } - - return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into())); - } - - Ok(Expr::CompoundFieldAccess { - root: Box::new(root), - access_chain, - }) - } - - fn keyword_to_modifier(k: Keyword) -> Option { - match k { - Keyword::LOCAL => Some(ContextModifier::Local), - Keyword::GLOBAL => Some(ContextModifier::Global), - Keyword::SESSION => Some(ContextModifier::Session), - _ => None, - } - } - - /// Check if the root is an identifier and all fields are identifiers. - fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool { - if !matches!(root, Expr::Identifier(_)) { - return false; - } - fields - .iter() - .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_)))) - } - - /// Convert a root and a list of fields to a list of identifiers. - fn exprs_to_idents(root: Expr, fields: Vec) -> Result, ParserError> { - let mut idents = vec![]; - if let Expr::Identifier(root) = root { - idents.push(root); - for x in fields { - if let AccessExpr::Dot(Expr::Identifier(ident)) = x { - idents.push(ident); - } else { - return parser_err!( - format!("Expected identifier, found: {}", x), - x.span().start - ); - } - } - Ok(idents) - } else { - parser_err!( - format!("Expected identifier, found: {}", root), - root.span().start - ) - } - } - - /// Returns true if the next tokens indicate the outer join operator `(+)`. 
- fn peek_outer_join_operator(&mut self) -> bool { - if !self.dialect.supports_outer_join_operator() { - return false; - } - - let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref(); - Token::LParen == maybe_lparen.token - && Token::Plus == maybe_plus.token - && Token::RParen == maybe_rparen.token - } - - /// If the next tokens indicates the outer join operator `(+)`, consume - /// the tokens and return true. - fn maybe_parse_outer_join_operator(&mut self) -> bool { - self.dialect.supports_outer_join_operator() - && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen]) - } - pub fn parse_utility_options(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let options = self.parse_comma_separated(Self::parse_utility_option)?; @@ -1978,7 +1307,7 @@ impl<'a> Parser<'a> { } fn parse_utility_option(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let next_token = self.peek_token(); if next_token == Token::Comma || next_token == Token::RParen { @@ -2005,7 +1334,7 @@ impl<'a> Parser<'a> { return Ok(None); } self.maybe_parse(|p| { - let params = p.parse_comma_separated(|p| p.parse_identifier())?; + let params = p.parse_comma_separated(|p| p.parse_identifier(false))?; p.expect_token(&Token::RParen)?; p.expect_token(&Token::Arrow)?; let expr = p.parse_expr()?; @@ -2016,29 +1345,7 @@ impl<'a> Parser<'a> { }) } - /// Tries to parse the body of an [ODBC function] call. - /// i.e. without the enclosing braces - /// - /// ```sql - /// fn myfunc(1,2,3) - /// ``` - /// - /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 - fn maybe_parse_odbc_fn_body(&mut self) -> Result, ParserError> { - self.maybe_parse(|p| { - p.expect_keyword(Keyword::FN)?; - let fn_name = p.parse_object_name(false)?; - let mut fn_call = p.parse_function_call(fn_name)?; - fn_call.uses_odbc_syntax = true; - Ok(Expr::Function(fn_call)) - }) - } - pub fn parse_function(&mut self, name: ObjectName) -> Result { - self.parse_function_call(name).map(Expr::Function) - } - - fn parse_function_call(&mut self, name: ObjectName) -> Result { self.expect_token(&Token::LParen)?; // Snowflake permits a subquery to be passed as an argument without @@ -2046,16 +1353,15 @@ impl<'a> Parser<'a> { if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() { let subquery = self.parse_query()?; self.expect_token(&Token::RParen)?; - return Ok(Function { + return Ok(Expr::Function(Function { name, - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::Subquery(subquery), filter: None, null_treatment: None, over: None, within_group: vec![], - }); + })); } let mut args = self.parse_function_argument_list()?; @@ -2108,29 +1414,28 @@ impl<'a> Parser<'a> { let window_spec = self.parse_window_spec()?; Some(WindowType::WindowSpec(window_spec)) } else { - Some(WindowType::NamedWindow(self.parse_identifier()?)) + Some(WindowType::NamedWindow(self.parse_identifier(false)?)) } } else { None }; - Ok(Function { + Ok(Expr::Function(Function { name, - uses_odbc_syntax: false, parameters, args: FunctionArguments::List(args), null_treatment, filter, over, within_group, - }) + })) } /// Optionally parses a null treatment clause. 
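A sketch of a function call with an OVER clause flowing through the function-call and window-spec parsing above; the table and column names are illustrative:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let sql = "SELECT row_number() OVER (PARTITION BY dept ORDER BY salary DESC) FROM emp";
    let stmts = Parser::parse_sql(&GenericDialect {}, sql)?;
    println!("{stmts:?}");
    Ok(())
}
```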
fn parse_null_treatment(&mut self) -> Result, ParserError> { match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) { Some(keyword) => { - self.expect_keyword_is(Keyword::NULLS)?; + self.expect_keyword(Keyword::NULLS)?; Ok(match keyword { Keyword::RESPECT => Some(NullTreatment::RespectNulls), @@ -2150,7 +1455,6 @@ impl<'a> Parser<'a> { }; Ok(Expr::Function(Function { name, - uses_odbc_syntax: false, parameters: FunctionArguments::None, args, filter: None, @@ -2177,7 +1481,7 @@ impl<'a> Parser<'a> { let units = self.parse_window_frame_units()?; let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) { let start_bound = self.parse_window_frame_bound()?; - self.expect_keyword_is(Keyword::AND)?; + self.expect_keyword(Keyword::AND)?; let end_bound = Some(self.parse_window_frame_bound()?); (start_bound, end_bound) } else { @@ -2280,18 +1584,17 @@ impl<'a> Parser<'a> { } pub fn parse_case_expr(&mut self) -> Result { - let case_token = AttachedToken(self.get_current_token().clone()); let mut operand = None; if !self.parse_keyword(Keyword::WHEN) { operand = Some(Box::new(self.parse_expr()?)); - self.expect_keyword_is(Keyword::WHEN)?; + self.expect_keyword(Keyword::WHEN)?; } let mut conditions = vec![]; + let mut results = vec![]; loop { - let condition = self.parse_expr()?; - self.expect_keyword_is(Keyword::THEN)?; - let result = self.parse_expr()?; - conditions.push(CaseWhen { condition, result }); + conditions.push(self.parse_expr()?); + self.expect_keyword(Keyword::THEN)?; + results.push(self.parse_expr()?); if !self.parse_keyword(Keyword::WHEN) { break; } @@ -2301,19 +1604,18 @@ impl<'a> Parser<'a> { } else { None }; - let end_token = AttachedToken(self.expect_keyword(Keyword::END)?); + self.expect_keyword(Keyword::END)?; Ok(Expr::Case { - case_token, - end_token, operand, conditions, + results, else_result, }) } pub fn parse_optional_cast_format(&mut self) -> Result, ParserError> { if self.parse_keyword(Keyword::FORMAT) { - let value = self.parse_value()?.value; + let value = self.parse_value()?; match self.parse_optional_time_zone()? { Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))), None => Ok(Some(CastFormat::Value(value))), @@ -2325,7 +1627,7 @@ impl<'a> Parser<'a> { pub fn parse_optional_time_zone(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) { - self.parse_value().map(|v| Some(v.value)) + self.parse_value().map(Some) } else { Ok(None) } @@ -2397,7 +1699,7 @@ impl<'a> Parser<'a> { pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let data_type = self.parse_data_type()?; let format = self.parse_optional_cast_format()?; self.expect_token(&Token::RParen)?; @@ -2454,7 +1756,7 @@ impl<'a> Parser<'a> { CeilFloorKind::DateTimeField(self.parse_date_time_field()?) } else if self.consume_token(&Token::Comma) { // Parse `CEIL/FLOOR(expr, scale)` - match self.parse_value()?.value { + match self.parse_value()? 
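A sketch of the searched CASE form handled by `parse_case_expr` above; the operands are illustrative:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let sql = "CASE WHEN amount > 100 THEN 'big' ELSE 'small' END";
    let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql)?;
    let expr = parser.parse_expr()?;
    println!("{expr:?}");
    Ok(())
}
```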
{ Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)), _ => { return Err(ParserError::ParserError( @@ -2487,7 +1789,7 @@ impl<'a> Parser<'a> { // Parse the subexpr till the IN keyword let expr = p.parse_subexpr(between_prec)?; - p.expect_keyword_is(Keyword::IN)?; + p.expect_keyword(Keyword::IN)?; let from = p.parse_expr()?; p.expect_token(&Token::RParen)?; Ok(Expr::Position { @@ -2499,20 +1801,12 @@ impl<'a> Parser<'a> { Some(expr) => Ok(expr), // Snowflake supports `position` as an ordinary function call // without the special `IN` syntax. - None => self.parse_function(ObjectName::from(vec![ident])), + None => self.parse_function(ObjectName(vec![ident])), } } - // { SUBSTRING | SUBSTR } ( [FROM 1] [FOR 3]) - pub fn parse_substring(&mut self) -> Result { - let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? { - Keyword::SUBSTR => true, - Keyword::SUBSTRING => false, - _ => { - self.prev_token(); - return self.expected("SUBSTR or SUBSTRING", self.peek_token()); - } - }; + pub fn parse_substring_expr(&mut self) -> Result { + // PARSE SUBSTRING (EXPR [FROM 1] [FOR 3]) self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; let mut from_expr = None; @@ -2532,7 +1826,6 @@ impl<'a> Parser<'a> { substring_from: from_expr.map(Box::new), substring_for: to_expr.map(Box::new), special, - shorthand, }) } @@ -2540,9 +1833,9 @@ impl<'a> Parser<'a> { // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3]) self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; - self.expect_keyword_is(Keyword::PLACING)?; + self.expect_keyword(Keyword::PLACING)?; let what_expr = self.parse_expr()?; - self.expect_keyword_is(Keyword::FROM)?; + self.expect_keyword(Keyword::FROM)?; let from_expr = self.parse_expr()?; let mut for_expr = None; if self.parse_keyword(Keyword::FOR) { @@ -2567,7 +1860,10 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let mut trim_where = None; if let Token::Word(word) = self.peek_token().token { - if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) { + if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING] + .iter() + .any(|d| word.keyword == *d) + { trim_where = Some(self.parse_trim_where()?); } } @@ -2583,7 +1879,7 @@ impl<'a> Parser<'a> { trim_characters: None, }) } else if self.consume_token(&Token::Comma) - && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect) + && dialect_of!(self is SnowflakeDialect | BigQueryDialect | GenericDialect) { let characters = self.parse_comma_separated(Parser::parse_expr)?; self.expect_token(&Token::RParen)?; @@ -2630,7 +1926,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::ERROR) { Ok(Some(ListAggOnOverflow::Error)) } else { - self.expect_keyword_is(Keyword::TRUNCATE)?; + self.expect_keyword(Keyword::TRUNCATE)?; let filler = match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT => @@ -2651,7 +1947,7 @@ impl<'a> Parser<'a> { if !with_count && !self.parse_keyword(Keyword::WITHOUT) { self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?; } - self.expect_keyword_is(Keyword::COUNT)?; + self.expect_keyword(Keyword::COUNT)?; Ok(Some(ListAggOnOverflow::Truncate { filler, with_count })) } } else { @@ -2668,14 +1964,12 @@ impl<'a> Parser<'a> { match &next_token.token { Token::Word(w) => match w.keyword { Keyword::YEAR => Ok(DateTimeField::Year), - Keyword::YEARS => Ok(DateTimeField::Years), Keyword::MONTH => Ok(DateTimeField::Month), - 
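A sketch of the special-form calls above, using the SQL-standard FROM/FOR syntax; the column name is illustrative:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // SUBSTRING(expr FROM start FOR length) uses keyword-delimited arguments
    // rather than a plain comma-separated argument list.
    let mut parser = Parser::new(&GenericDialect {})
        .try_with_sql("SUBSTRING(description FROM 2 FOR 5)")?;
    let expr = parser.parse_expr()?;
    println!("{expr:?}");
    Ok(())
}
```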
Keyword::MONTHS => Ok(DateTimeField::Months), Keyword::WEEK => { let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect) && self.consume_token(&Token::LParen) { - let week_day = self.parse_identifier()?; + let week_day = self.parse_identifier(false)?; self.expect_token(&Token::RParen)?; Some(week_day) } else { @@ -2683,19 +1977,14 @@ impl<'a> Parser<'a> { }; Ok(DateTimeField::Week(week_day)) } - Keyword::WEEKS => Ok(DateTimeField::Weeks), Keyword::DAY => Ok(DateTimeField::Day), Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek), Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear), - Keyword::DAYS => Ok(DateTimeField::Days), Keyword::DATE => Ok(DateTimeField::Date), Keyword::DATETIME => Ok(DateTimeField::Datetime), Keyword::HOUR => Ok(DateTimeField::Hour), - Keyword::HOURS => Ok(DateTimeField::Hours), Keyword::MINUTE => Ok(DateTimeField::Minute), - Keyword::MINUTES => Ok(DateTimeField::Minutes), Keyword::SECOND => Ok(DateTimeField::Second), - Keyword::SECONDS => Ok(DateTimeField::Seconds), Keyword::CENTURY => Ok(DateTimeField::Century), Keyword::DECADE => Ok(DateTimeField::Decade), Keyword::DOY => Ok(DateTimeField::Doy), @@ -2722,14 +2011,14 @@ impl<'a> Parser<'a> { Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion), _ if self.dialect.allow_extract_custom() => { self.prev_token(); - let custom = self.parse_identifier()?; + let custom = self.parse_identifier(false)?; Ok(DateTimeField::Custom(custom)) } _ => self.expected("date/time field", next_token), }, Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => { self.prev_token(); - let custom = self.parse_identifier()?; + let custom = self.parse_identifier(false)?; Ok(DateTimeField::Custom(custom)) } _ => self.expected("date/time field", next_token), @@ -2758,45 +2047,20 @@ impl<'a> Parser<'a> { } } - /// Parse expression types that start with a left brace '{'. - /// Examples: - /// ```sql - /// -- Dictionary expr. - /// {'key1': 'value1', 'key2': 'value2'} - /// - /// -- Function call using the ODBC syntax. - /// { fn CONCAT('foo', 'bar') } - /// ``` - fn parse_lbrace_expr(&mut self) -> Result { - let token = self.expect_token(&Token::LBrace)?; - - if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? { - self.expect_token(&Token::RBrace)?; - return Ok(fn_expr); - } - - if self.dialect.supports_dictionary_syntax() { - self.prev_token(); // Put back the '{' - return self.parse_dictionary(); - } - - self.expected("an expression", token) - } - /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`] /// /// # Errors /// This method will raise an error if the column list is empty or with invalid identifiers, /// the match expression is not a literal string, or if the search modifier is not valid. pub fn parse_match_against(&mut self) -> Result { - let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; - self.expect_keyword_is(Keyword::AGAINST)?; + self.expect_keyword(Keyword::AGAINST)?; self.expect_token(&Token::LParen)?; // MySQL is too permissive about the value, IMO we can't validate it perfectly on syntax level. 
- let match_value = self.parse_value()?.value; + let match_value = self.parse_value()?; let in_natural_language_mode_keywords = &[ Keyword::IN, @@ -2875,7 +2139,7 @@ impl<'a> Parser<'a> { } else if self.dialect.require_interval_qualifier() { return parser_err!( "INTERVAL requires a unit after the literal value", - self.peek_token().span.start + self.peek_token().location ); } else { None @@ -2922,19 +2186,12 @@ impl<'a> Parser<'a> { matches!( word.keyword, Keyword::YEAR - | Keyword::YEARS | Keyword::MONTH - | Keyword::MONTHS | Keyword::WEEK - | Keyword::WEEKS | Keyword::DAY - | Keyword::DAYS | Keyword::HOUR - | Keyword::HOURS | Keyword::MINUTE - | Keyword::MINUTES | Keyword::SECOND - | Keyword::SECONDS | Keyword::CENTURY | Keyword::DECADE | Keyword::DOW @@ -2961,6 +2218,7 @@ impl<'a> Parser<'a> { } } + /// Bigquery specific: Parse a struct literal /// Syntax /// ```sql /// -- typed @@ -2968,19 +2226,13 @@ impl<'a> Parser<'a> { /// -- typeless /// STRUCT( expr1 [AS field_name] [, ... ]) /// ``` - fn parse_struct_literal(&mut self) -> Result { - // Parse the fields definition if exist `<[field_name] field_type, ...>` - self.prev_token(); + fn parse_bigquery_struct_literal(&mut self) -> Result { let (fields, trailing_bracket) = self.parse_struct_type_def(Self::parse_struct_field_def)?; if trailing_bracket.0 { - return parser_err!( - "unmatched > in STRUCT literal", - self.peek_token().span.start - ); + return parser_err!("unmatched > in STRUCT literal", self.peek_token().location); } - // Parse the struct values `(expr1 [, ... ])` self.expect_token(&Token::LParen)?; let values = self .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?; @@ -2989,13 +2241,13 @@ impl<'a> Parser<'a> { Ok(Expr::Struct { values, fields }) } - /// Parse an expression value for a struct literal + /// Parse an expression value for a bigquery struct [1] /// Syntax /// ```sql /// expr [AS name] /// ``` /// - /// For biquery [1], Parameter typed_syntax is set to true if the expression + /// Parameter typed_syntax is set to true if the expression /// is to be parsed as a field expression declared using typed /// struct syntax [2], and false if using typeless struct syntax [3]. /// @@ -3008,10 +2260,10 @@ impl<'a> Parser<'a> { if typed_syntax { return parser_err!("Typed syntax does not allow AS", { self.prev_token(); - self.peek_token().span.start + self.peek_token().location }); } - let field_name = self.parse_identifier()?; + let field_name = self.parse_identifier(false)?; Ok(Expr::Named { expr: expr.into(), name: field_name, @@ -3040,7 +2292,8 @@ impl<'a> Parser<'a> { where F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, { - self.expect_keyword_is(Keyword::STRUCT)?; + let start_token = self.peek_token(); + self.expect_keyword(Keyword::STRUCT)?; // Nothing to do if we have no type information. if Token::Lt != self.peek_token() { @@ -3052,10 +2305,16 @@ impl<'a> Parser<'a> { let trailing_bracket = loop { let (def, trailing_bracket) = elem_parser(self)?; field_defs.push(def); - // The struct field definition is finished if it occurs `>>` or comma. - if trailing_bracket.0 || !self.consume_token(&Token::Comma) { + if !self.consume_token(&Token::Comma) { break trailing_bracket; } + + // Angle brackets are balanced so we only expect the trailing `>>` after + // we've matched all field types for the current struct. + // e.g. 
this is invalid syntax `STRUCT>>, INT>(NULL)` + if trailing_bracket.0 { + return parser_err!("unmatched > in STRUCT definition", start_token.location); + } }; Ok(( @@ -3066,16 +2325,15 @@ impl<'a> Parser<'a> { /// Duckdb Struct Data Type fn parse_duckdb_struct_type_def(&mut self) -> Result, ParserError> { - self.expect_keyword_is(Keyword::STRUCT)?; + self.expect_keyword(Keyword::STRUCT)?; self.expect_token(&Token::LParen)?; let struct_body = self.parse_comma_separated(|parser| { - let field_name = parser.parse_identifier()?; + let field_name = parser.parse_identifier(false)?; let field_type = parser.parse_data_type()?; Ok(StructField { field_name: Some(field_name), field_type, - options: None, }) }); self.expect_token(&Token::RParen)?; @@ -3104,17 +2362,15 @@ impl<'a> Parser<'a> { let field_name = if is_anonymous_field { None } else { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) }; let (field_type, trailing_bracket) = self.parse_data_type_helper()?; - let options = self.maybe_parse_options(Keyword::OPTIONS)?; Ok(( StructField { field_name, field_type, - options, }, trailing_bracket, )) @@ -3130,13 +2386,13 @@ impl<'a> Parser<'a> { /// /// [1]: https://duckdb.org/docs/sql/data_types/union.html fn parse_union_type_def(&mut self) -> Result, ParserError> { - self.expect_keyword_is(Keyword::UNION)?; + self.expect_keyword(Keyword::UNION)?; self.expect_token(&Token::LParen)?; let fields = self.parse_comma_separated(|p| { Ok(UnionField { - field_name: p.parse_identifier()?, + field_name: p.parse_identifier(false)?, field_type: p.parse_data_type()?, }) })?; @@ -3146,7 +2402,7 @@ impl<'a> Parser<'a> { Ok(fields) } - /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting + /// DuckDB specific: Parse a duckdb [dictionary] /// /// Syntax: /// @@ -3155,18 +2411,17 @@ impl<'a> Parser<'a> { /// ``` /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs - /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary(&mut self) -> Result { + fn parse_duckdb_struct_literal(&mut self) -> Result { self.expect_token(&Token::LBrace)?; - let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?; + let fields = self.parse_comma_separated(Self::parse_duckdb_dictionary_field)?; self.expect_token(&Token::RBrace)?; Ok(Expr::Dictionary(fields)) } - /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting + /// Parse a field for a duckdb [dictionary] /// /// Syntax /// @@ -3175,9 +2430,8 @@ impl<'a> Parser<'a> { /// ``` /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs - /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters - fn parse_dictionary_field(&mut self) -> Result { - let key = self.parse_identifier()?; + fn parse_duckdb_dictionary_field(&mut self) -> Result { + let key = self.parse_identifier(false)?; self.expect_token(&Token::Colon)?; @@ -3237,7 +2491,7 @@ impl<'a> Parser<'a> { /// /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> { - self.expect_keyword_is(Keyword::MAP)?; + self.expect_keyword(Keyword::MAP)?; self.expect_token(&Token::LParen)?; let key_data_type = self.parse_data_type()?; self.expect_token(&Token::Comma)?; @@ -3257,7 +2511,7 @@ impl<'a> Parser<'a> { /// /// [tuple]: 
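The brace-delimited literal parsing above (`parse_duckdb_struct_literal` / `parse_dictionary`) is dialect-gated, so DuckDB is a convenient way to exercise it. A minimal sketch assuming the released API; field handling differs slightly between the two versions this patch spans (`parse_comma_separated` vs. `parse_comma_separated0`):

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // A DuckDB struct/dictionary literal: `{'key': value, ...}`.
    let sql = "SELECT {'name': 'alice', 'age': 30}";
    let ast = Parser::parse_sql(&DuckDbDialect {}, sql)?;
    println!("{ast:?}");
    Ok(())
}
```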
https://clickhouse.com/docs/en/sql-reference/data-types/tuple fn parse_click_house_tuple_def(&mut self) -> Result, ParserError> { - self.expect_keyword_is(Keyword::TUPLE)?; + self.expect_keyword(Keyword::TUPLE)?; self.expect_token(&Token::LParen)?; let mut field_defs = vec![]; loop { @@ -3306,16 +2560,10 @@ impl<'a> Parser<'a> { return infix; } - let dialect = self.dialect; - - self.advance_token(); - let tok = self.get_current_token(); - let tok_index = self.get_current_index(); - let span = tok.span; - let regular_binary_operator = match &tok.token { + let mut tok = self.next_token(); + let regular_binary_operator = match &mut tok.token { Token::Spaceship => Some(BinaryOperator::Spaceship), Token::DoubleEq => Some(BinaryOperator::Eq), - Token::Assignment => Some(BinaryOperator::Assignment), Token::Eq => Some(BinaryOperator::Eq), Token::Neq => Some(BinaryOperator::NotEq), Token::Gt => Some(BinaryOperator::Gt), @@ -3331,7 +2579,7 @@ impl<'a> Parser<'a> { Token::Caret => { // In PostgreSQL, ^ stands for the exponentiation operation, // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html - if dialect_is!(dialect is PostgreSqlDialect) { + if dialect_of!(self is PostgreSqlDialect) { Some(BinaryOperator::PGExp) } else { Some(BinaryOperator::BitwiseXor) @@ -3339,25 +2587,22 @@ impl<'a> Parser<'a> { } Token::Ampersand => Some(BinaryOperator::BitwiseAnd), Token::Div => Some(BinaryOperator::Divide), - Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { + Token::DuckIntDiv if dialect_of!(self is DuckDbDialect | GenericDialect) => { Some(BinaryOperator::DuckIntegerDivide) } - Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { + Token::ShiftLeft if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => { Some(BinaryOperator::PGBitwiseShiftLeft) } - Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { + Token::ShiftRight if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => { Some(BinaryOperator::PGBitwiseShiftRight) } - Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { + Token::Sharp if dialect_of!(self is PostgreSqlDialect) => { Some(BinaryOperator::PGBitwiseXor) } - Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { + Token::Overlap if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { Some(BinaryOperator::PGOverlap) } - Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { - Some(BinaryOperator::PGOverlap) - } - Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { + Token::CaretAt if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { Some(BinaryOperator::PGStartsWith) } Token::Tilde => Some(BinaryOperator::PGRegexMatch), @@ -3380,65 +2625,13 @@ impl<'a> Parser<'a> { Token::Question => Some(BinaryOperator::Question), Token::QuestionAnd => Some(BinaryOperator::QuestionAnd), Token::QuestionPipe => Some(BinaryOperator::QuestionPipe), - Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())), - Token::DoubleSharp if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::DoubleHash) - } - - Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::AndLt) - } - Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => { - 
Some(BinaryOperator::AndGt) - } - Token::QuestionMarkDash if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::QuestionDash) - } - Token::AmpersandLeftAngleBracketVerticalBar - if self.dialect.supports_geometric_types() => - { - Some(BinaryOperator::AndLtPipe) - } - Token::VerticalBarAmpersandRightAngleBracket - if self.dialect.supports_geometric_types() => - { - Some(BinaryOperator::PipeAndGt) - } - Token::TwoWayArrow if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::LtDashGt) - } - Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::LtCaret) - } - Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::GtCaret) - } - Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::QuestionHash) - } - Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::QuestionDoublePipe) - } - Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::QuestionDashPipe) - } - Token::TildeEqual if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::TildeEq) - } - Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::LtLtPipe) - } - Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => { - Some(BinaryOperator::PipeGtGt) - } - Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At), + Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(core::mem::take(s))), Token::Word(w) => match w.keyword { Keyword::AND => Some(BinaryOperator::And), Keyword::OR => Some(BinaryOperator::Or), Keyword::XOR => Some(BinaryOperator::Xor), - Keyword::OVERLAPS => Some(BinaryOperator::Overlaps), - Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { + Keyword::OPERATOR if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { self.expect_token(&Token::LParen)?; // there are special rules for operator names in // postgres so we can not use 'parse_object' @@ -3446,8 +2639,7 @@ impl<'a> Parser<'a> { // See https://www.postgresql.org/docs/current/sql-createoperator.html let mut idents = vec![]; loop { - self.advance_token(); - idents.push(self.get_current_token().to_string()); + idents.push(self.next_token().to_string()); if !self.consume_token(&Token::Period) { break; } @@ -3460,7 +2652,6 @@ impl<'a> Parser<'a> { _ => None, }; - let tok = self.token_at(tok_index); if let Some(op) = regular_binary_operator { if let Some(keyword) = self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME]) @@ -3491,7 +2682,7 @@ impl<'a> Parser<'a> { format!( "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}" ), - span.start + tok.location ); }; @@ -3542,11 +2733,9 @@ impl<'a> Parser<'a> { { let expr2 = self.parse_expr()?; Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2))) - } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) { - Ok(is_normalized) } else { self.expected( - "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS", + "[NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS", self.peek_token(), ) } @@ -3616,55 +2805,50 @@ impl<'a> Parser<'a> { self.expected("IN or BETWEEN after NOT", self.peek_token()) } } - Keyword::MEMBER => { - if self.parse_keyword(Keyword::OF) { - self.expect_token(&Token::LParen)?; - let array = self.parse_expr()?; - 
self.expect_token(&Token::RParen)?; - Ok(Expr::MemberOf(MemberOf { - value: Box::new(expr), - array: Box::new(array), - })) - } else { - self.expected("OF after MEMBER", self.peek_token()) - } - } // Can only happen if `get_next_precedence` got out of sync with this function _ => parser_err!( format!("No infix parser for token {:?}", tok.token), - tok.span.start + tok.location ), } - } else if Token::DoubleColon == *tok { + } else if Token::DoubleColon == tok { Ok(Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(expr), data_type: self.parse_data_type()?, format: None, }) - } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() { + } else if Token::ExclamationMark == tok { + // PostgreSQL factorial operation Ok(Expr::UnaryOp { op: UnaryOperator::PGPostfixFactorial, expr: Box::new(expr), }) - } else if Token::LBracket == *tok && self.dialect.supports_partiql() - || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok) - { + } else if Token::LBracket == tok { + if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) { + self.parse_subscript(expr) + } else if dialect_of!(self is SnowflakeDialect) { + self.prev_token(); + self.parse_json_access(expr) + } else { + self.parse_map_access(expr) + } + } else if dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == tok { self.prev_token(); self.parse_json_access(expr) } else { // Can only happen if `get_next_precedence` got out of sync with this function parser_err!( format!("No infix parser for token {:?}", tok.token), - tok.span.start + tok.location ) } } /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO` - pub fn parse_escape_char(&mut self) -> Result, ParserError> { + pub fn parse_escape_char(&mut self) -> Result, ParserError> { if self.parse_keyword(Keyword::ESCAPE) { - Ok(Some(self.parse_value()?.into())) + Ok(Some(self.parse_literal_string()?)) } else { Ok(None) } @@ -3743,24 +2927,15 @@ impl<'a> Parser<'a> { }) } - /// Parse a multi-dimension array accessing like `[1:3][1][1]` - pub fn parse_multi_dim_subscript( - &mut self, - chain: &mut Vec, - ) -> Result<(), ParserError> { - while self.consume_token(&Token::LBracket) { - self.parse_subscript(chain)?; - } - Ok(()) - } - /// Parses an array subscript like `[1:3]` /// /// Parser is right after `[` - fn parse_subscript(&mut self, chain: &mut Vec) -> Result<(), ParserError> { + pub fn parse_subscript(&mut self, expr: Expr) -> Result { let subscript = self.parse_subscript_inner()?; - chain.push(AccessExpr::Subscript(subscript)); - Ok(()) + Ok(Expr::Subscript { + expr: Box::new(expr), + subscript: Box::new(subscript), + }) } fn parse_json_path_object_key(&mut self) -> Result { @@ -3788,14 +2963,6 @@ impl<'a> Parser<'a> { } fn parse_json_access(&mut self, expr: Expr) -> Result { - let path = self.parse_json_path()?; - Ok(Expr::JsonAccess { - value: Box::new(expr), - path, - }) - } - - fn parse_json_path(&mut self) -> Result { let mut path = Vec::new(); loop { match self.next_token().token { @@ -3819,7 +2986,50 @@ impl<'a> Parser<'a> { } debug_assert!(!path.is_empty()); - Ok(JsonPath { path }) + Ok(Expr::JsonAccess { + value: Box::new(expr), + path: JsonPath { path }, + }) + } + + pub fn parse_map_access(&mut self, expr: Expr) -> Result { + let key = self.parse_expr()?; + self.expect_token(&Token::RBracket)?; + + let mut keys = vec![MapAccessKey { + key, + syntax: MapAccessSyntax::Bracket, + }]; + loop { + let key = match self.peek_token().token { + Token::LBracket => { + 
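Bracket handling after an expression is routed differently per dialect above: PostgreSQL-style dialects go through the subscript parser (including slices like `[1:3]`), while Snowflake-style colon access goes through the JSON path parser. A short sketch of the PostgreSQL subscript route, assuming the released `Parser::parse_sql` API:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // A slice subscript and a chained (multi-dimensional) subscript.
    let sql = "SELECT schedule[1:3], matrix[1][2] FROM t";
    let ast = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{ast:?}");
    Ok(())
}
```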
self.next_token(); // consume `[` + let key = self.parse_expr()?; + self.expect_token(&Token::RBracket)?; + MapAccessKey { + key, + syntax: MapAccessSyntax::Bracket, + } + } + // Access on BigQuery nested and repeated expressions can + // mix notations in the same expression. + // https://cloud.google.com/bigquery/docs/nested-repeated#query_nested_and_repeated_columns + Token::Period if dialect_of!(self is BigQueryDialect) => { + self.next_token(); // consume `.` + MapAccessKey { + key: self.parse_expr()?, + syntax: MapAccessSyntax::Period, + } + } + _ => break, + }; + keys.push(key); + } + + Ok(Expr::MapAccess { + column: Box::new(expr), + keys, + }) } /// Parses the parens following the `[ NOT ] IN` operator. @@ -3837,13 +3047,15 @@ impl<'a> Parser<'a> { }); } self.expect_token(&Token::LParen)?; - let in_op = match self.maybe_parse(|p| p.parse_query())? { - Some(subquery) => Expr::InSubquery { + let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) { + self.prev_token(); + Expr::InSubquery { expr: Box::new(expr), - subquery, + subquery: self.parse_query()?, negated, - }, - None => Expr::InList { + } + } else { + Expr::InList { expr: Box::new(expr), list: if self.dialect.supports_in_empty_list() { self.parse_comma_separated0(Parser::parse_expr, Token::RParen)? @@ -3851,7 +3063,7 @@ impl<'a> Parser<'a> { self.parse_comma_separated(Parser::parse_expr)? }, negated, - }, + } }; self.expect_token(&Token::RParen)?; Ok(in_op) @@ -3862,7 +3074,7 @@ impl<'a> Parser<'a> { // Stop parsing subexpressions for and on tokens with // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc. let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; - self.expect_keyword_is(Keyword::AND)?; + self.expect_keyword(Keyword::AND)?; let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; Ok(Expr::Between { expr: Box::new(expr), @@ -3872,7 +3084,7 @@ impl<'a> Parser<'a> { }) } - /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`. + /// Parse a postgresql casting style which is in the form of `expr::datatype`. pub fn parse_pg_cast(&mut self, expr: Expr) -> Result { Ok(Expr::Cast { kind: CastKind::DoubleColon, @@ -3887,26 +3099,12 @@ impl<'a> Parser<'a> { self.dialect.get_next_precedence_default(self) } - /// Return the token at the given location, or EOF if the index is beyond - /// the length of the current set of tokens. - pub fn token_at(&self, index: usize) -> &TokenWithSpan { - self.tokens.get(index).unwrap_or(&EOF_TOKEN) - } - /// Return the first non-whitespace token that has not yet been processed - /// or Token::EOF - /// - /// See [`Self::peek_token_ref`] to avoid the copy. - pub fn peek_token(&self) -> TokenWithSpan { + /// (or None if reached end-of-file) + pub fn peek_token(&self) -> TokenWithLocation { self.peek_nth_token(0) } - /// Return a reference to the first non-whitespace token that has not yet - /// been processed or Token::EOF - pub fn peek_token_ref(&self) -> &TokenWithSpan { - self.peek_nth_token_ref(0) - } - /// Returns the `N` next non-whitespace tokens that have not yet been /// processed. /// @@ -3938,63 +3136,41 @@ impl<'a> Parser<'a> { /// yet been processed. /// /// See [`Self::peek_token`] for an example. 
- pub fn peek_tokens_with_location(&self) -> [TokenWithSpan; N] { + pub fn peek_tokens_with_location(&self) -> [TokenWithLocation; N] { let mut index = self.index; core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; - if let Some(TokenWithSpan { + if let Some(TokenWithLocation { token: Token::Whitespace(_), - span: _, + location: _, }) = token { continue; } - break token.cloned().unwrap_or(TokenWithSpan { + break token.cloned().unwrap_or(TokenWithLocation { token: Token::EOF, - span: Span::empty(), + location: Location { line: 0, column: 0 }, }); }) } - /// Returns references to the `N` next non-whitespace tokens - /// that have not yet been processed. - /// - /// See [`Self::peek_tokens`] for an example. - pub fn peek_tokens_ref(&self) -> [&TokenWithSpan; N] { - let mut index = self.index; - core::array::from_fn(|_| loop { - let token = self.tokens.get(index); - index += 1; - if let Some(TokenWithSpan { - token: Token::Whitespace(_), - span: _, - }) = token - { - continue; - } - break token.unwrap_or(&EOF_TOKEN); - }) - } - /// Return nth non-whitespace token that has not yet been processed - pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan { - self.peek_nth_token_ref(n).clone() - } - - /// Return nth non-whitespace token that has not yet been processed - pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan { + pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation { let mut index = self.index; loop { index += 1; match self.tokens.get(index - 1) { - Some(TokenWithSpan { + Some(TokenWithLocation { token: Token::Whitespace(_), - span: _, + location: _, }) => continue, non_whitespace => { if n == 0 { - return non_whitespace.unwrap_or(&EOF_TOKEN); + return non_whitespace.cloned().unwrap_or(TokenWithLocation { + token: Token::EOF, + location: Location { line: 0, column: 0 }, + }); } n -= 1; } @@ -4004,104 +3180,57 @@ impl<'a> Parser<'a> { /// Return the first token, possibly whitespace, that has not yet been processed /// (or None if reached end-of-file). - pub fn peek_token_no_skip(&self) -> TokenWithSpan { + pub fn peek_token_no_skip(&self) -> TokenWithLocation { self.peek_nth_token_no_skip(0) } /// Return nth token, possibly whitespace, that has not yet been processed. - pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan { + pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithLocation { self.tokens .get(self.index + n) .cloned() - .unwrap_or(TokenWithSpan { + .unwrap_or(TokenWithLocation { token: Token::EOF, - span: Span::empty(), + location: Location { line: 0, column: 0 }, }) } - /// Return true if the next tokens exactly `expected` - /// - /// Does not advance the current token. - fn peek_keywords(&mut self, expected: &[Keyword]) -> bool { - let index = self.index; - let matched = self.parse_keywords(expected); - self.index = index; - matched - } - - /// Advances to the next non-whitespace token and returns a copy. - /// - /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to - /// avoid the copy. - pub fn next_token(&mut self) -> TokenWithSpan { - self.advance_token(); - self.get_current_token().clone() - } - - /// Returns the index of the current token - /// - /// This can be used with APIs that expect an index, such as - /// [`Self::token_at`] - pub fn get_current_index(&self) -> usize { - self.index.saturating_sub(1) - } - - /// Return the next unprocessed token, possibly whitespace. 
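The peek/next machinery above always skips whitespace tokens and substitutes EOF when the token stream runs out. A simplified, self-contained model of that cursor behavior; the enum and struct here are illustrative assumptions, not the crate's real `TokenWithLocation`/`TokenWithSpan` types:

```rust
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Whitespace,
    Word(String),
    Eof,
}

struct Cursor {
    tokens: Vec<Tok>,
    index: usize,
}

impl Cursor {
    /// Return the nth upcoming non-whitespace token without consuming anything.
    fn peek_nth(&self, mut n: usize) -> Tok {
        let mut index = self.index;
        loop {
            match self.tokens.get(index) {
                Some(Tok::Whitespace) => index += 1,
                Some(tok) => {
                    if n == 0 {
                        return tok.clone();
                    }
                    n -= 1;
                    index += 1;
                }
                None => return Tok::Eof,
            }
        }
    }

    /// Advance past whitespace and return the next meaningful token
    /// (safe to call repeatedly after EOF).
    fn next(&mut self) -> Tok {
        loop {
            let tok = self.tokens.get(self.index).cloned();
            self.index += 1;
            match tok {
                Some(Tok::Whitespace) => continue,
                Some(tok) => return tok,
                None => return Tok::Eof,
            }
        }
    }
}

fn main() {
    let mut cur = Cursor {
        tokens: vec![Tok::Word("SELECT".into()), Tok::Whitespace, Tok::Word("1".into())],
        index: 0,
    };
    assert_eq!(cur.peek_nth(1), Tok::Word("1".into()));
    assert_eq!(cur.next(), Tok::Word("SELECT".into()));
}
```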
- pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> { - self.index += 1; - self.tokens.get(self.index - 1) - } - - /// Advances the current token to the next non-whitespace token - /// - /// See [`Self::get_current_token`] to get the current token after advancing - pub fn advance_token(&mut self) { + /// Return the first non-whitespace token that has not yet been processed + /// (or None if reached end-of-file) and mark it as processed. OK to call + /// repeatedly after reaching EOF. + pub fn next_token(&mut self) -> TokenWithLocation { loop { self.index += 1; match self.tokens.get(self.index - 1) { - Some(TokenWithSpan { + Some(TokenWithLocation { token: Token::Whitespace(_), - span: _, + location: _, }) => continue, - _ => break, + token => { + return token + .cloned() + .unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF)) + } } } } - /// Returns a reference to the current token - /// - /// Does not advance the current token. - pub fn get_current_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(1)) + /// Return the first unprocessed token, possibly whitespace. + pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> { + self.index += 1; + self.tokens.get(self.index - 1) } - /// Returns a reference to the previous token - /// - /// Does not advance the current token. - pub fn get_previous_token(&self) -> &TokenWithSpan { - self.token_at(self.index.saturating_sub(2)) - } - - /// Returns a reference to the next token - /// - /// Does not advance the current token. - pub fn get_next_token(&self) -> &TokenWithSpan { - self.token_at(self.index) - } - - /// Seek back the last one non-whitespace token. - /// - /// Must be called after `next_token()`, otherwise might panic. OK to call - /// after `next_token()` indicates an EOF. - /// - // TODO rename to backup_token and deprecate prev_token? + /// Push back the last one non-whitespace token. Must be called after + /// `next_token()`, otherwise might panic. OK to call after + /// `next_token()` indicates an EOF. pub fn prev_token(&mut self) { loop { assert!(self.index > 0); self.index -= 1; - if let Some(TokenWithSpan { + if let Some(TokenWithLocation { token: Token::Whitespace(_), - span: _, + location: _, }) = self.tokens.get(self.index) { continue; @@ -4111,27 +3240,10 @@ impl<'a> Parser<'a> { } /// Report `found` was encountered instead of `expected` - pub fn expected(&self, expected: &str, found: TokenWithSpan) -> Result { + pub fn expected(&self, expected: &str, found: TokenWithLocation) -> Result { parser_err!( format!("Expected: {expected}, found: {found}"), - found.span.start - ) - } - - /// report `found` was encountered instead of `expected` - pub fn expected_ref(&self, expected: &str, found: &TokenWithSpan) -> Result { - parser_err!( - format!("Expected: {expected}, found: {found}"), - found.span.start - ) - } - - /// Report that the token at `index` was found instead of `expected`. - pub fn expected_at(&self, expected: &str, index: usize) -> Result { - let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN); - parser_err!( - format!("Expected: {expected}, found: {found}"), - found.span.start + found.location ) } @@ -4139,19 +3251,15 @@ impl<'a> Parser<'a> { /// true. Otherwise, no tokens are consumed and returns false. 
#[must_use] pub fn parse_keyword(&mut self, expected: Keyword) -> bool { - if self.peek_keyword(expected) { - self.advance_token(); - true - } else { - false + match self.peek_token().token { + Token::Word(w) if expected == w.keyword => { + self.next_token(); + true + } + _ => false, } } - #[must_use] - pub fn peek_keyword(&self, expected: Keyword) -> bool { - matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword) - } - /// If the current token is the `expected` keyword followed by /// specified tokens, consume them and returns true. /// Otherwise, no tokens are consumed and returns false. @@ -4160,16 +3268,16 @@ impl<'a> Parser<'a> { /// not be efficient as it does a loop on the tokens with `peek_nth_token` /// each time. pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { - match &self.peek_token_ref().token { + match self.peek_token().token { Token::Word(w) if expected == w.keyword => { for (idx, token) in tokens.iter().enumerate() { - if self.peek_nth_token_ref(idx + 1).token != *token { + if self.peek_nth_token(idx + 1).token != *token { return false; } } // consume all tokens for _ in 0..(tokens.len() + 1) { - self.advance_token(); + self.next_token(); } true } @@ -4194,30 +3302,18 @@ impl<'a> Parser<'a> { true } - /// If the current token is one of the given `keywords`, returns the keyword - /// that matches, without consuming the token. Otherwise, returns [`None`]. - #[must_use] - pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option { - for keyword in keywords { - if self.peek_keyword(*keyword) { - return Some(*keyword); - } - } - None - } - /// If the current token is one of the given `keywords`, consume the token /// and return the keyword that matches. Otherwise, no tokens are consumed /// and returns [`None`]. #[must_use] pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option { - match &self.peek_token_ref().token { + match self.peek_token().token { Token::Word(w) => { keywords .iter() .find(|keyword| **keyword == w.keyword) .map(|keyword| { - self.advance_token(); + self.next_token(); *keyword }) } @@ -4232,35 +3328,20 @@ impl<'a> Parser<'a> { Ok(keyword) } else { let keywords: Vec = keywords.iter().map(|x| format!("{x:?}")).collect(); - self.expected_ref( + self.expected( &format!("one of {}", keywords.join(" or ")), - self.peek_token_ref(), + self.peek_token(), ) } } /// If the current token is the `expected` keyword, consume the token. /// Otherwise, return an error. - /// - // todo deprecate in favor of expected_keyword_is - pub fn expect_keyword(&mut self, expected: Keyword) -> Result { - if self.parse_keyword(expected) { - Ok(self.get_current_token().clone()) - } else { - self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref()) - } - } - - /// If the current token is the `expected` keyword, consume the token. - /// Otherwise, return an error. - /// - /// This differs from expect_keyword only in that the matched keyword - /// token is not returned. - pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> { + pub fn expect_keyword(&mut self, expected: Keyword) -> Result<(), ParserError> { if self.parse_keyword(expected) { Ok(()) } else { - self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref()) + self.expected(format!("{:?}", &expected).as_str(), self.peek_token()) } } @@ -4268,18 +3349,16 @@ impl<'a> Parser<'a> { /// sequence, consume them and returns Ok. Otherwise, return an Error. 
pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> { for &kw in expected { - self.expect_keyword_is(kw)?; + self.expect_keyword(kw)?; } Ok(()) } /// Consume the next token if it matches the expected token, otherwise return false - /// - /// See [Self::advance_token] to consume the token unconditionally #[must_use] pub fn consume_token(&mut self, expected: &Token) -> bool { - if self.peek_token_ref() == expected { - self.advance_token(); + if self.peek_token() == *expected { + self.next_token(); true } else { false @@ -4302,11 +3381,11 @@ impl<'a> Parser<'a> { } /// Bail out if the current token is not an expected keyword, or consume it if it is - pub fn expect_token(&mut self, expected: &Token) -> Result { - if self.peek_token_ref() == expected { - Ok(self.next_token()) + pub fn expect_token(&mut self, expected: &Token) -> Result<(), ParserError> { + if self.consume_token(expected) { + Ok(()) } else { - self.expected_ref(&expected.to_string(), self.peek_token_ref()) + self.expected(&expected.to_string(), self.peek_token()) } } @@ -4328,18 +3407,19 @@ impl<'a> Parser<'a> { // e.g. `SELECT 1, 2, FROM t` // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas + // + // This pattern could be captured better with RAII type semantics, but it's quite a bit of + // code to add for just one case, so we'll just do it manually here. + let old_value = self.options.trailing_commas; + self.options.trailing_commas |= self.dialect.supports_projection_trailing_commas(); - let trailing_commas = - self.options.trailing_commas | self.dialect.supports_projection_trailing_commas(); + let ret = self.parse_comma_separated(|p| p.parse_select_item()); + self.options.trailing_commas = old_value; - self.parse_comma_separated_with_trailing_commas( - |p| p.parse_select_item(), - trailing_commas, - Self::is_reserved_for_column_alias, - ) + ret } - pub fn parse_actions_list(&mut self) -> Result, ParserError> { + pub fn parse_actions_list(&mut self) -> Result, ParserError> { let mut values = vec![]; loop { values.push(self.parse_grant_permission()?); @@ -4362,111 +3442,38 @@ impl<'a> Parser<'a> { Ok(values) } - /// Parse a list of [TableWithJoins] - fn parse_table_with_joins(&mut self) -> Result, ParserError> { - let trailing_commas = self.dialect.supports_from_trailing_commas(); - - self.parse_comma_separated_with_trailing_commas( - Parser::parse_table_and_joins, - trailing_commas, - |kw, _parser| { - self.dialect - .get_reserved_keywords_for_table_factor() - .contains(kw) - }, - ) - } - /// Parse the comma of a comma-separated syntax element. - /// `R` is a predicate that should return true if the next - /// keyword is a reserved keyword. 
- /// Allows for control over trailing commas - /// /// Returns true if there is a next element - fn is_parse_comma_separated_end_with_trailing_commas( - &mut self, - trailing_commas: bool, - is_reserved_keyword: &R, - ) -> bool - where - R: Fn(&Keyword, &mut Parser) -> bool, - { + fn is_parse_comma_separated_end(&mut self) -> bool { if !self.consume_token(&Token::Comma) { true - } else if trailing_commas { - let token = self.next_token().token; - let is_end = match token { - Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true, + } else if self.options.trailing_commas { + let token = self.peek_token().token; + match token { + Token::Word(ref kw) + if keywords::RESERVED_FOR_COLUMN_ALIAS.contains(&kw.keyword) => + { + true + } Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => { true } _ => false, - }; - self.prev_token(); - - is_end + } } else { false } } - /// Parse the comma of a comma-separated syntax element. - /// Returns true if there is a next element - fn is_parse_comma_separated_end(&mut self) -> bool { - self.is_parse_comma_separated_end_with_trailing_commas( - self.options.trailing_commas, - &Self::is_reserved_for_column_alias, - ) - } - /// Parse a comma-separated list of 1+ items accepted by `F` - pub fn parse_comma_separated(&mut self, f: F) -> Result, ParserError> - where - F: FnMut(&mut Parser<'a>) -> Result, - { - self.parse_comma_separated_with_trailing_commas( - f, - self.options.trailing_commas, - Self::is_reserved_for_column_alias, - ) - } - - /// Parse a comma-separated list of 1+ items accepted by `F`. - /// `R` is a predicate that should return true if the next - /// keyword is a reserved keyword. - /// Allows for control over trailing commas. - fn parse_comma_separated_with_trailing_commas( - &mut self, - mut f: F, - trailing_commas: bool, - is_reserved_keyword: R, - ) -> Result, ParserError> - where - F: FnMut(&mut Parser<'a>) -> Result, - R: Fn(&Keyword, &mut Parser) -> bool, - { - let mut values = vec![]; - loop { - values.push(f(self)?); - if self.is_parse_comma_separated_end_with_trailing_commas( - trailing_commas, - &is_reserved_keyword, - ) { - break; - } - } - Ok(values) - } - - /// Parse a period-separated list of 1+ items accepted by `F` - fn parse_period_separated(&mut self, mut f: F) -> Result, ParserError> + pub fn parse_comma_separated(&mut self, mut f: F) -> Result, ParserError> where F: FnMut(&mut Parser<'a>) -> Result, { let mut values = vec![]; loop { values.push(f(self)?); - if !self.consume_token(&Token::Period) { + if self.is_parse_comma_separated_end() { break; } } @@ -4524,64 +3531,19 @@ impl<'a> Parser<'a> { self.parse_comma_separated(f) } - /// Parses 0 or more statements, each followed by a semicolon. - /// If the next token is any of `terminal_keywords` then no more - /// statements will be parsed. - pub(crate) fn parse_statement_list( - &mut self, - terminal_keywords: &[Keyword], - ) -> Result, ParserError> { - let mut values = vec![]; - loop { - match &self.peek_nth_token_ref(0).token { - Token::EOF => break, - Token::Word(w) => { - if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) { - break; - } - } - _ => {} - } - - values.push(self.parse_statement()?); - self.expect_token(&Token::SemiColon)?; - } - Ok(values) - } - - /// Default implementation of a predicate that returns true if - /// the specified keyword is reserved for column alias. 
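The comma-separated loop above stops either when no comma follows an item or, when trailing commas are allowed, when the comma is followed by a closing token or a reserved keyword. BigQuery's projection is one place where that shows up end to end; a small sketch assuming the released API:

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // BigQuery permits a trailing comma in the projection; the loop above
    // terminates because the token after the comma is the reserved FROM.
    let sql = "SELECT 1, 2, FROM t";
    let ast = Parser::parse_sql(&BigQueryDialect {}, sql)?;
    println!("{ast:?}");
    Ok(())
}
```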
- /// See [Dialect::is_column_alias] - fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool { - !parser.dialect.is_column_alias(kw, parser) - } - /// Run a parser method `f`, reverting back to the current position if unsuccessful. - /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`. - /// Returns `Ok(None)` if `f` returns any other error. - pub fn maybe_parse(&mut self, f: F) -> Result, ParserError> - where - F: FnMut(&mut Parser) -> Result, - { - match self.try_parse(f) { - Ok(t) => Ok(Some(t)), - Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded), - _ => Ok(None), - } - } - - /// Run a parser method `f`, reverting back to the current position if unsuccessful. - pub fn try_parse(&mut self, mut f: F) -> Result + pub fn maybe_parse(&mut self, mut f: F) -> Result, ParserError> where F: FnMut(&mut Parser) -> Result, { let index = self.index; match f(self) { - Ok(t) => Ok(t), - Err(e) => { - // Unwind stack if limit exceeded + Ok(t) => Ok(Some(t)), + // Unwind stack if limit exceeded + Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded), + Err(_) => { self.index = index; - Err(e) + Ok(None) } } } @@ -4589,7 +3551,7 @@ impl<'a> Parser<'a> { /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found. pub fn parse_all_or_distinct(&mut self) -> Result, ParserError> { - let loc = self.peek_token().span.start; + let loc = self.peek_token().location; let all = self.parse_keyword(Keyword::ALL); let distinct = self.parse_keyword(Keyword::DISTINCT); if !distinct { @@ -4633,24 +3595,21 @@ impl<'a> Parser<'a> { .is_some(); let persistent = dialect_of!(self is DuckDbDialect) && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some(); - let create_view_params = self.parse_create_view_params()?; if self.parse_keyword(Keyword::TABLE) { self.parse_create_table(or_replace, temporary, global, transient) } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) { self.prev_token(); - self.parse_create_view(or_alter, or_replace, temporary, create_view_params) + self.parse_create_view(or_replace, temporary) } else if self.parse_keyword(Keyword::POLICY) { self.parse_create_policy() } else if self.parse_keyword(Keyword::EXTERNAL) { self.parse_create_external_table(or_replace) } else if self.parse_keyword(Keyword::FUNCTION) { - self.parse_create_function(or_alter, or_replace, temporary) - } else if self.parse_keyword(Keyword::DOMAIN) { - self.parse_create_domain() + self.parse_create_function(or_replace, temporary) } else if self.parse_keyword(Keyword::TRIGGER) { - self.parse_create_trigger(or_alter, or_replace, false) + self.parse_create_trigger(or_replace, false) } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) { - self.parse_create_trigger(or_alter, or_replace, true) + self.parse_create_trigger(or_replace, true) } else if self.parse_keyword(Keyword::MACRO) { self.parse_create_macro(or_replace, temporary) } else if self.parse_keyword(Keyword::SECRET) { @@ -4680,10 +3639,6 @@ impl<'a> Parser<'a> { self.parse_create_type() } else if self.parse_keyword(Keyword::PROCEDURE) { self.parse_create_procedure(or_alter) - } else if self.parse_keyword(Keyword::CONNECTOR) { - self.parse_create_connector() - } else if self.parse_keyword(Keyword::SERVER) { - self.parse_pg_create_server() } else { self.expected("an object type after 
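`maybe_parse`/`try_parse` above implement speculative parsing: record the token index, run the sub-parser, and rewind on failure so the caller can try another production. A minimal backtracking sketch with simplified, illustrative types (not the crate's real `Parser`):

```rust
struct MiniParser {
    tokens: Vec<String>,
    index: usize,
}

impl MiniParser {
    /// Run `f`; on error, restore the cursor and report "no match".
    fn maybe_parse<T>(
        &mut self,
        f: impl Fn(&mut MiniParser) -> Result<T, String>,
    ) -> Option<T> {
        let saved = self.index;
        match f(self) {
            Ok(value) => Some(value),
            Err(_) => {
                self.index = saved;
                None
            }
        }
    }
}

fn main() {
    let mut p = MiniParser {
        tokens: vec!["SELECT".into(), "1".into()],
        index: 0,
    };
    // Speculatively look for WITH; the cursor is rolled back when it fails.
    let got = p.maybe_parse(|p| {
        let tok = p.tokens.get(p.index).cloned().ok_or_else(|| "eof".to_string())?;
        p.index += 1;
        if tok == "WITH" {
            Ok(tok)
        } else {
            Err("expected WITH".to_string())
        }
    });
    assert!(got.is_none());
    assert_eq!(p.index, 0);
}
```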
CREATE", self.peek_token()) } @@ -4702,9 +3657,9 @@ impl<'a> Parser<'a> { let mut name = None; if self.peek_token() != Token::LParen { if self.parse_keyword(Keyword::IN) { - storage_specifier = self.parse_identifier().ok() + storage_specifier = self.parse_identifier(false).ok() } else { - name = self.parse_identifier().ok(); + name = self.parse_identifier(false).ok(); } // Storage specifier may follow the name @@ -4712,19 +3667,19 @@ impl<'a> Parser<'a> { && self.peek_token() != Token::LParen && self.parse_keyword(Keyword::IN) { - storage_specifier = self.parse_identifier().ok(); + storage_specifier = self.parse_identifier(false).ok(); } } self.expect_token(&Token::LParen)?; - self.expect_keyword_is(Keyword::TYPE)?; - let secret_type = self.parse_identifier()?; + self.expect_keyword(Keyword::TYPE)?; + let secret_type = self.parse_identifier(false)?; let mut options = Vec::new(); if self.consume_token(&Token::Comma) { options.append(&mut self.parse_comma_separated(|p| { - let key = p.parse_identifier()?; - let value = p.parse_identifier()?; + let key = p.parse_identifier(false)?; + let value = p.parse_identifier(false)?; Ok(SecretOption { key, value }) })?); } @@ -4840,7 +3795,7 @@ impl<'a> Parser<'a> { /// Parse a UNCACHE TABLE statement pub fn parse_uncache_table(&mut self) -> Result { - self.expect_keyword_is(Keyword::TABLE)?; + self.expect_keyword(Keyword::TABLE)?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; Ok(Statement::UNCache { @@ -4851,11 +3806,11 @@ impl<'a> Parser<'a> { /// SQLite-specific `CREATE VIRTUAL TABLE` pub fn parse_create_virtual_table(&mut self) -> Result { - self.expect_keyword_is(Keyword::TABLE)?; + self.expect_keyword(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; - self.expect_keyword_is(Keyword::USING)?; - let module_name = self.parse_identifier()?; + self.expect_keyword(Keyword::USING)?; + let module_name = self.parse_identifier(false)?; // SQLite docs note that module "arguments syntax is sufficiently // general that the arguments can be made to appear as column // definitions in a traditional CREATE TABLE statement", but @@ -4874,43 +3829,24 @@ impl<'a> Parser<'a> { let schema_name = self.parse_schema_name()?; - let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) { - Some(self.parse_expr()?) - } else { - None - }; - - let with = if self.peek_keyword(Keyword::WITH) { - Some(self.parse_options(Keyword::WITH)?) - } else { - None - }; - - let options = if self.peek_keyword(Keyword::OPTIONS) { - Some(self.parse_options(Keyword::OPTIONS)?) 
- } else { - None - }; - Ok(Statement::CreateSchema { schema_name, if_not_exists, - with, - options, - default_collate_spec, }) } fn parse_schema_name(&mut self) -> Result { if self.parse_keyword(Keyword::AUTHORIZATION) { - Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?)) + Ok(SchemaName::UnnamedAuthorization( + self.parse_identifier(false)?, + )) } else { let name = self.parse_object_name(false)?; if self.parse_keyword(Keyword::AUTHORIZATION) { Ok(SchemaName::NamedAuthorization( name, - self.parse_identifier()?, + self.parse_identifier(false)?, )) } else { Ok(SchemaName::Simple(name)) @@ -4957,14 +3893,13 @@ impl<'a> Parser<'a> { Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))), _ => self.expected( "JAR, FILE or ARCHIVE, got {:?}", - TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())), + TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())), ), } } pub fn parse_create_function( &mut self, - or_alter: bool, or_replace: bool, temporary: bool, ) -> Result { @@ -4976,30 +3911,29 @@ impl<'a> Parser<'a> { self.parse_create_macro(or_replace, temporary) } else if dialect_of!(self is BigQueryDialect) { self.parse_bigquery_create_function(or_replace, temporary) - } else if dialect_of!(self is MsSqlDialect) { - self.parse_mssql_create_function(or_alter, or_replace, temporary) } else { self.prev_token(); self.expected("an object type after CREATE", self.peek_token()) } } - /// Parse `CREATE FUNCTION` for [PostgreSQL] + /// Parse `CREATE FUNCTION` for [Postgres] /// - /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html + /// [Postgres]: https://www.postgresql.org/docs/15/sql-createfunction.html fn parse_postgres_create_function( &mut self, or_replace: bool, temporary: bool, ) -> Result { let name = self.parse_object_name(false)?; - self.expect_token(&Token::LParen)?; - let args = if Token::RParen != self.peek_token_ref().token { - self.parse_comma_separated(Parser::parse_function_arg)? + let args = if self.consume_token(&Token::RParen) { + self.prev_token(); + None } else { - vec![] + Some(self.parse_comma_separated(Parser::parse_function_arg)?) 
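`parse_create_function` above dispatches per dialect; on the PostgreSQL path the body options (RETURNS, LANGUAGE, IMMUTABLE, AS, and so on) may appear in any order. A short sketch assuming the released `Parser::parse_sql` API and that this statement form is accepted by the built version:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Named arguments, a return type, and string-literal function body.
    let sql = "CREATE OR REPLACE FUNCTION add(a INTEGER, b INTEGER) \
               RETURNS INTEGER LANGUAGE SQL IMMUTABLE AS 'select a + b'";
    let ast = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{ast:?}");
    Ok(())
}
```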
}; + self.expect_token(&Token::RParen)?; let return_type = if self.parse_keyword(Keyword::RETURNS) { @@ -5033,7 +3967,7 @@ impl<'a> Parser<'a> { )); } else if self.parse_keyword(Keyword::LANGUAGE) { ensure_not_set(&body.language, "LANGUAGE")?; - body.language = Some(self.parse_identifier()?); + body.language = Some(self.parse_identifier(false)?); } else if self.parse_keyword(Keyword::IMMUTABLE) { ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?; body.behavior = Some(FunctionBehavior::Immutable); @@ -5091,12 +4025,11 @@ impl<'a> Parser<'a> { } } - Ok(Statement::CreateFunction(CreateFunction { - or_alter: false, + Ok(Statement::CreateFunction { or_replace, temporary, name, - args: Some(args), + args, return_type, behavior: body.behavior, called_on_null: body.called_on_null, @@ -5108,7 +4041,7 @@ impl<'a> Parser<'a> { determinism_specifier: None, options: None, remote_connection: None, - })) + }) } /// Parse `CREATE FUNCTION` for [Hive] @@ -5120,13 +4053,12 @@ impl<'a> Parser<'a> { temporary: bool, ) -> Result { let name = self.parse_object_name(false)?; - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let as_ = self.parse_create_function_body_string()?; let using = self.parse_optional_create_function_using()?; - Ok(Statement::CreateFunction(CreateFunction { - or_alter: false, + Ok(Statement::CreateFunction { or_replace, temporary, name, @@ -5142,7 +4074,7 @@ impl<'a> Parser<'a> { determinism_specifier: None, options: None, remote_connection: None, - })) + }) } /// Parse `CREATE FUNCTION` for [BigQuery] @@ -5154,7 +4086,22 @@ impl<'a> Parser<'a> { temporary: bool, ) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let (name, args) = self.parse_create_function_name_and_params()?; + let name = self.parse_object_name(false)?; + + let parse_function_param = + |parser: &mut Parser| -> Result { + let name = parser.parse_identifier(false)?; + let data_type = parser.parse_data_type()?; + Ok(OperateFunctionArg { + mode: None, + name: Some(name), + data_type, + default_expr: None, + }) + }; + self.expect_token(&Token::LParen)?; + let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; + self.expect_token(&Token::RParen)?; let return_type = if self.parse_keyword(Keyword::RETURNS) { Some(self.parse_data_type()?) @@ -5171,7 +4118,7 @@ impl<'a> Parser<'a> { }; let language = if self.parse_keyword(Keyword::LANGUAGE) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) 
} else { None }; @@ -5188,7 +4135,7 @@ impl<'a> Parser<'a> { let mut options = self.maybe_parse_options(Keyword::OPTIONS)?; let function_body = if remote_connection.is_none() { - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let expr = self.parse_expr()?; if options.is_none() { options = self.maybe_parse_options(Keyword::OPTIONS)?; @@ -5200,8 +4147,7 @@ impl<'a> Parser<'a> { None }; - Ok(Statement::CreateFunction(CreateFunction { - or_alter: false, + Ok(Statement::CreateFunction { or_replace, temporary, if_not_exists, @@ -5217,123 +4163,7 @@ impl<'a> Parser<'a> { behavior: None, called_on_null: None, parallel: None, - })) - } - - /// Parse `CREATE FUNCTION` for [MsSql] - /// - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql - fn parse_mssql_create_function( - &mut self, - or_alter: bool, - or_replace: bool, - temporary: bool, - ) -> Result { - let (name, args) = self.parse_create_function_name_and_params()?; - - self.expect_keyword(Keyword::RETURNS)?; - - let return_table = self.maybe_parse(|p| { - let return_table_name = p.parse_identifier()?; - - p.expect_keyword_is(Keyword::TABLE)?; - p.prev_token(); - - let table_column_defs = match p.parse_data_type()? { - DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => { - table_column_defs - } - _ => parser_err!( - "Expected table column definitions after TABLE keyword", - p.peek_token().span.start - )?, - }; - - Ok(DataType::NamedTable { - name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]), - columns: table_column_defs, - }) - })?; - - let return_type = if return_table.is_some() { - return_table - } else { - Some(self.parse_data_type()?) - }; - - let _ = self.parse_keyword(Keyword::AS); - - let function_body = if self.peek_keyword(Keyword::BEGIN) { - let begin_token = self.expect_keyword(Keyword::BEGIN)?; - let statements = self.parse_statement_list(&[Keyword::END])?; - let end_token = self.expect_keyword(Keyword::END)?; - - Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { - begin_token: AttachedToken(begin_token), - statements, - end_token: AttachedToken(end_token), - })) - } else if self.parse_keyword(Keyword::RETURN) { - if self.peek_token() == Token::LParen { - Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?)) - } else if self.peek_keyword(Keyword::SELECT) { - let select = self.parse_select()?; - Some(CreateFunctionBody::AsReturnSelect(select)) - } else { - parser_err!( - "Expected a subquery (or bare SELECT statement) after RETURN", - self.peek_token().span.start - )? - } - } else { - parser_err!("Unparsable function body", self.peek_token().span.start)? - }; - - Ok(Statement::CreateFunction(CreateFunction { - or_alter, - or_replace, - temporary, - if_not_exists: false, - name, - args: Some(args), - return_type, - function_body, - language: None, - determinism_specifier: None, - options: None, - remote_connection: None, - using: None, - behavior: None, - called_on_null: None, - parallel: None, - })) - } - - fn parse_create_function_name_and_params( - &mut self, - ) -> Result<(ObjectName, Vec), ParserError> { - let name = self.parse_object_name(false)?; - let parse_function_param = - |parser: &mut Parser| -> Result { - let name = parser.parse_identifier()?; - let data_type = parser.parse_data_type()?; - let default_expr = if parser.consume_token(&Token::Eq) { - Some(parser.parse_expr()?) 
- } else { - None - }; - - Ok(OperateFunctionArg { - mode: None, - name: Some(name), - data_type, - default_expr, - }) - }; - self.expect_token(&Token::LParen)?; - let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; - self.expect_token(&Token::RParen)?; - Ok((name, args)) + }) } fn parse_function_arg(&mut self) -> Result { @@ -5350,21 +4180,10 @@ impl<'a> Parser<'a> { // parse: [ argname ] argtype let mut name = None; let mut data_type = self.parse_data_type()?; - - // To check whether the first token is a name or a type, we need to - // peek the next token, which if it is another type keyword, then the - // first token is a name and not a type in itself. - let data_type_idx = self.get_current_index(); - if let Some(next_data_type) = self.maybe_parse(|parser| parser.parse_data_type())? { - let token = self.token_at(data_type_idx); - - // We ensure that the token is a `Word` token, and not other special tokens. - if !matches!(token.token, Token::Word(_)) { - return self.expected("a name or type", token.clone()); - } - - name = Some(Ident::new(token.to_string())); - data_type = next_data_type; + if let DataType::Custom(n, _) = &data_type { + // the first token is actually a name + name = Some(n.0[0].clone()); + data_type = self.parse_data_type()?; } let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq) @@ -5387,17 +4206,14 @@ impl<'a> Parser<'a> { /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ] /// ``` pub fn parse_drop_trigger(&mut self) -> Result { - if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) { + if !dialect_of!(self is PostgreSqlDialect | GenericDialect) { self.prev_token(); return self.expected("an object type after DROP", self.peek_token()); } let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let trigger_name = self.parse_object_name(false)?; - let table_name = if self.parse_keyword(Keyword::ON) { - Some(self.parse_object_name(false)?) - } else { - None - }; + self.expect_keyword(Keyword::ON)?; + let table_name = self.parse_object_name(false)?; let option = self .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) .map(|keyword| match keyword { @@ -5415,11 +4231,10 @@ impl<'a> Parser<'a> { pub fn parse_create_trigger( &mut self, - or_alter: bool, or_replace: bool, is_constraint: bool, ) -> Result { - if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) { + if !dialect_of!(self is PostgreSqlDialect | GenericDialect) { self.prev_token(); return self.expected("an object type after CREATE", self.peek_token()); } @@ -5428,7 +4243,7 @@ impl<'a> Parser<'a> { let period = self.parse_trigger_period()?; let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?; - self.expect_keyword_is(Keyword::ON)?; + self.expect_keyword(Keyword::ON)?; let table_name = self.parse_object_name(false)?; let referenced_table_name = if self.parse_keyword(Keyword::FROM) { @@ -5446,7 +4261,7 @@ impl<'a> Parser<'a> { } } - self.expect_keyword_is(Keyword::FOR)?; + self.expect_keyword(Keyword::FOR)?; let include_each = self.parse_keyword(Keyword::EACH); let trigger_object = match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? 
{ @@ -5460,12 +4275,11 @@ impl<'a> Parser<'a> { .then(|| self.parse_expr()) .transpose()?; - self.expect_keyword_is(Keyword::EXECUTE)?; + self.expect_keyword(Keyword::EXECUTE)?; let exec_body = self.parse_trigger_exec_body()?; Ok(Statement::CreateTrigger { - or_alter, or_replace, is_constraint, name, @@ -5477,8 +4291,7 @@ impl<'a> Parser<'a> { trigger_object, include_each, condition, - exec_body: Some(exec_body), - statements: None, + exec_body, characteristics, }) } @@ -5486,16 +4299,14 @@ impl<'a> Parser<'a> { pub fn parse_trigger_period(&mut self) -> Result { Ok( match self.expect_one_of_keywords(&[ - Keyword::FOR, Keyword::BEFORE, Keyword::AFTER, Keyword::INSTEAD, ])? { - Keyword::FOR => TriggerPeriod::For, Keyword::BEFORE => TriggerPeriod::Before, Keyword::AFTER => TriggerPeriod::After, Keyword::INSTEAD => self - .expect_keyword_is(Keyword::OF) + .expect_keyword(Keyword::OF) .map(|_| TriggerPeriod::InsteadOf)?, _ => unreachable!(), }, @@ -5513,7 +4324,9 @@ impl<'a> Parser<'a> { Keyword::INSERT => TriggerEvent::Insert, Keyword::UPDATE => { if self.parse_keyword(Keyword::OF) { - let cols = self.parse_comma_separated(Parser::parse_identifier)?; + let cols = self.parse_comma_separated(|ident| { + Parser::parse_identifier(ident, false) + })?; TriggerEvent::Update(cols) } else { TriggerEvent::Update(vec![]) @@ -5577,7 +4390,7 @@ impl<'a> Parser<'a> { }; self.expect_token(&Token::RParen)?; - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; Ok(Statement::CreateMacro { or_replace, @@ -5597,7 +4410,7 @@ impl<'a> Parser<'a> { } fn parse_macro_arg(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let default_expr = if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) { @@ -5612,7 +4425,7 @@ impl<'a> Parser<'a> { &mut self, or_replace: bool, ) -> Result { - self.expect_keyword_is(Keyword::TABLE)?; + self.expect_keyword(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; let (columns, constraints) = self.parse_columns()?; @@ -5630,17 +4443,12 @@ impl<'a> Parser<'a> { }; let location = hive_formats.location.clone(); let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; - let table_options = if !table_properties.is_empty() { - CreateTableOptions::TableProperties(table_properties) - } else { - CreateTableOptions::None - }; Ok(CreateTableBuilder::new(table_name) .columns(columns) .constraints(constraints) .hive_distribution(hive_distribution) .hive_formats(Some(hive_formats)) - .table_options(table_options) + .table_properties(table_properties) .or_replace(or_replace) .if_not_exists(if_not_exists) .external(true) @@ -5681,13 +4489,11 @@ impl<'a> Parser<'a> { pub fn parse_create_view( &mut self, - or_alter: bool, or_replace: bool, temporary: bool, - create_view_params: Option, ) -> Result { let materialized = self.parse_keyword(Keyword::MATERIALIZED); - self.expect_keyword_is(Keyword::VIEW)?; + self.expect_keyword(Keyword::VIEW)?; let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect) && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet). 
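
A minimal sketch of exercising the trigger parsing paths touched above through the crate's public API. The SQL text and the `PostgreSqlDialect` choice are illustrative assumptions, not taken from this patch, and the resulting AST shape differs between the two sides of the diff:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // CREATE TRIGGER flows through parse_create_trigger / parse_trigger_period /
    // parse_trigger_event; DROP TRIGGER flows through parse_drop_trigger.
    let sql = "CREATE TRIGGER check_update BEFORE UPDATE ON accounts \
               FOR EACH ROW EXECUTE FUNCTION check_account_update(); \
               DROP TRIGGER IF EXISTS check_update ON accounts";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    for stmt in &statements {
        println!("{stmt:?}");
    }
}
```
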
@@ -5702,7 +4508,7 @@ impl<'a> Parser<'a> { } let cluster_by = if self.parse_keyword(Keyword::CLUSTER) { - self.expect_keyword_is(Keyword::BY)?; + self.expect_keyword(Keyword::BY)?; self.parse_parenthesized_column_list(Optional, false)? } else { vec![] @@ -5728,12 +4534,16 @@ impl<'a> Parser<'a> { && self.parse_keyword(Keyword::COMMENT) { self.expect_token(&Token::Eq)?; - Some(self.parse_comment_value()?) + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(str) => Some(str), + _ => self.expected("string literal", next_token)?, + } } else { None }; - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let query = self.parse_query()?; // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here. @@ -5746,7 +4556,6 @@ impl<'a> Parser<'a> { ]); Ok(Statement::CreateView { - or_alter, name, columns, query, @@ -5759,68 +4568,9 @@ impl<'a> Parser<'a> { if_not_exists, temporary, to, - params: create_view_params, }) } - /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL]. - /// - /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html - fn parse_create_view_params(&mut self) -> Result, ParserError> { - let algorithm = if self.parse_keyword(Keyword::ALGORITHM) { - self.expect_token(&Token::Eq)?; - Some( - match self.expect_one_of_keywords(&[ - Keyword::UNDEFINED, - Keyword::MERGE, - Keyword::TEMPTABLE, - ])? { - Keyword::UNDEFINED => CreateViewAlgorithm::Undefined, - Keyword::MERGE => CreateViewAlgorithm::Merge, - Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable, - _ => { - self.prev_token(); - let found = self.next_token(); - return self - .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found); - } - }, - ) - } else { - None - }; - let definer = if self.parse_keyword(Keyword::DEFINER) { - self.expect_token(&Token::Eq)?; - Some(self.parse_grantee_name()?) - } else { - None - }; - let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) { - Some( - match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? 
{ - Keyword::DEFINER => CreateViewSecurity::Definer, - Keyword::INVOKER => CreateViewSecurity::Invoker, - _ => { - self.prev_token(); - let found = self.next_token(); - return self.expected("DEFINER or INVOKER after SQL SECURITY", found); - } - }, - ) - } else { - None - }; - if algorithm.is_some() || definer.is_some() || security.is_some() { - Ok(Some(CreateViewParams { - algorithm, - definer, - security, - })) - } else { - Ok(None) - } - } - pub fn parse_create_role(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; @@ -5880,7 +4630,7 @@ impl<'a> Parser<'a> { let loc = self .tokens .get(self.index - 1) - .map_or(Location { line: 0, column: 0 }, |t| t.span.start); + .map_or(Location { line: 0, column: 0 }, |t| t.location); match keyword { Keyword::AUTHORIZATION => { if authorization_owner.is_some() { @@ -5959,7 +4709,7 @@ impl<'a> Parser<'a> { } } Keyword::CONNECTION => { - self.expect_keyword_is(Keyword::LIMIT)?; + self.expect_keyword(Keyword::LIMIT)?; if connection_limit.is_some() { parser_err!("Found multiple CONNECTION LIMIT", loc) } else { @@ -5968,7 +4718,7 @@ impl<'a> Parser<'a> { } } Keyword::VALID => { - self.expect_keyword_is(Keyword::UNTIL)?; + self.expect_keyword(Keyword::UNTIL)?; if valid_until.is_some() { parser_err!("Found multiple VALID UNTIL", loc) } else { @@ -5981,14 +4731,14 @@ impl<'a> Parser<'a> { if !in_role.is_empty() { parser_err!("Found multiple IN ROLE", loc) } else { - in_role = self.parse_comma_separated(|p| p.parse_identifier())?; + in_role = self.parse_comma_separated(|p| p.parse_identifier(false))?; Ok(()) } } else if self.parse_keyword(Keyword::GROUP) { if !in_group.is_empty() { parser_err!("Found multiple IN GROUP", loc) } else { - in_group = self.parse_comma_separated(|p| p.parse_identifier())?; + in_group = self.parse_comma_separated(|p| p.parse_identifier(false))?; Ok(()) } } else { @@ -5999,7 +4749,7 @@ impl<'a> Parser<'a> { if !role.is_empty() { parser_err!("Found multiple ROLE", loc) } else { - role = self.parse_comma_separated(|p| p.parse_identifier())?; + role = self.parse_comma_separated(|p| p.parse_identifier(false))?; Ok(()) } } @@ -6007,7 +4757,7 @@ impl<'a> Parser<'a> { if !user.is_empty() { parser_err!("Found multiple USER", loc) } else { - user = self.parse_comma_separated(|p| p.parse_identifier())?; + user = self.parse_comma_separated(|p| p.parse_identifier(false))?; Ok(()) } } @@ -6015,7 +4765,7 @@ impl<'a> Parser<'a> { if !admin.is_empty() { parser_err!("Found multiple ADMIN", loc) } else { - admin = self.parse_comma_separated(|p| p.parse_identifier())?; + admin = self.parse_comma_separated(|p| p.parse_identifier(false))?; Ok(()) } } @@ -6052,46 +4802,17 @@ impl<'a> Parser<'a> { Some(Keyword::SESSION_USER) => Owner::SessionUser, Some(_) => unreachable!(), None => { - match self.parse_identifier() { + match self.parse_identifier(false) { Ok(ident) => Owner::Ident(ident), Err(e) => { return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}"))) } } - } + }, }; Ok(owner) } - /// Parses a [Statement::CreateDomain] statement. - fn parse_create_domain(&mut self) -> Result { - let name = self.parse_object_name(false)?; - self.expect_keyword_is(Keyword::AS)?; - let data_type = self.parse_data_type()?; - let collation = if self.parse_keyword(Keyword::COLLATE) { - Some(self.parse_identifier()?) 
- } else { - None - }; - let default = if self.parse_keyword(Keyword::DEFAULT) { - Some(self.parse_expr()?) - } else { - None - }; - let mut constraints = Vec::new(); - while let Some(constraint) = self.parse_optional_table_constraint()? { - constraints.push(constraint); - } - - Ok(Statement::CreateDomain(CreateDomain { - name, - data_type, - collation, - default, - constraints, - })) - } - /// ```sql /// CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ] /// [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ] @@ -6102,8 +4823,8 @@ impl<'a> Parser<'a> { /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html) pub fn parse_create_policy(&mut self) -> Result { - let name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::ON)?; + let name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::ON)?; let table_name = self.parse_object_name(false)?; let policy_type = if self.parse_keyword(Keyword::AS) { @@ -6173,49 +4894,6 @@ impl<'a> Parser<'a> { }) } - /// ```sql - /// CREATE CONNECTOR [IF NOT EXISTS] connector_name - /// [TYPE datasource_type] - /// [URL datasource_url] - /// [COMMENT connector_comment] - /// [WITH DCPROPERTIES(property_name=property_value, ...)] - /// ``` - /// - /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) - pub fn parse_create_connector(&mut self) -> Result { - let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_identifier()?; - - let connector_type = if self.parse_keyword(Keyword::TYPE) { - Some(self.parse_literal_string()?) - } else { - None - }; - - let url = if self.parse_keyword(Keyword::URL) { - Some(self.parse_literal_string()?) - } else { - None - }; - - let comment = self.parse_optional_inline_comment()?; - - let with_dcproperties = - match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? 
{ - properties if !properties.is_empty() => Some(properties), - _ => None, - }; - - Ok(Statement::CreateConnector(CreateConnector { - name, - if_not_exists, - connector_type, - url, - comment, - with_dcproperties, - })) - } - pub fn parse_drop(&mut self) -> Result { // MySQL dialect supports `TEMPORARY` let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect) @@ -6227,8 +4905,6 @@ impl<'a> Parser<'a> { ObjectType::Table } else if self.parse_keyword(Keyword::VIEW) { ObjectType::View - } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) { - ObjectType::MaterializedView } else if self.parse_keyword(Keyword::INDEX) { ObjectType::Index } else if self.parse_keyword(Keyword::ROLE) { @@ -6247,21 +4923,15 @@ impl<'a> Parser<'a> { return self.parse_drop_function(); } else if self.parse_keyword(Keyword::POLICY) { return self.parse_drop_policy(); - } else if self.parse_keyword(Keyword::CONNECTOR) { - return self.parse_drop_connector(); - } else if self.parse_keyword(Keyword::DOMAIN) { - return self.parse_drop_domain(); } else if self.parse_keyword(Keyword::PROCEDURE) { return self.parse_drop_procedure(); } else if self.parse_keyword(Keyword::SECRET) { return self.parse_drop_secret(temporary, persistent); } else if self.parse_keyword(Keyword::TRIGGER) { return self.parse_drop_trigger(); - } else if self.parse_keyword(Keyword::EXTENSION) { - return self.parse_drop_extension(); } else { return self.expected( - "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, or MATERIALIZED VIEW after DROP", + "TABLE, VIEW, INDEX, ROLE, SCHEMA, DATABASE, FUNCTION, PROCEDURE, STAGE, TRIGGER, SECRET, SEQUENCE, or TYPE after DROP", self.peek_token(), ); }; @@ -6270,7 +4940,7 @@ impl<'a> Parser<'a> { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; - let loc = self.peek_token().span.start; + let loc = self.peek_token().location; let cascade = self.parse_keyword(Keyword::CASCADE); let restrict = self.parse_keyword(Keyword::RESTRICT); let purge = self.parse_keyword(Keyword::PURGE); @@ -6283,11 +4953,6 @@ impl<'a> Parser<'a> { loc ); } - let table = if self.parse_keyword(Keyword::ON) { - Some(self.parse_object_name(false)?) 
- } else { - None - }; Ok(Statement::Drop { object_type, if_exists, @@ -6296,14 +4961,13 @@ impl<'a> Parser<'a> { restrict, purge, temporary, - table, }) } - fn parse_optional_drop_behavior(&mut self) -> Option { + fn parse_optional_referential_action(&mut self) -> Option { match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) { - Some(Keyword::CASCADE) => Some(DropBehavior::Cascade), - Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict), + Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade), + Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict), _ => None, } } @@ -6315,11 +4979,11 @@ impl<'a> Parser<'a> { fn parse_drop_function(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?; - let drop_behavior = self.parse_optional_drop_behavior(); + let option = self.parse_optional_referential_action(); Ok(Statement::DropFunction { if_exists, func_desc, - drop_behavior, + option, }) } @@ -6330,41 +4994,17 @@ impl<'a> Parser<'a> { /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html) fn parse_drop_policy(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::ON)?; + let name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::ON)?; let table_name = self.parse_object_name(false)?; - let drop_behavior = self.parse_optional_drop_behavior(); + let option = self.parse_optional_referential_action(); Ok(Statement::DropPolicy { if_exists, name, table_name, - drop_behavior, + option, }) } - /// ```sql - /// DROP CONNECTOR [IF EXISTS] name - /// ``` - /// - /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) - fn parse_drop_connector(&mut self) -> Result { - let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; - Ok(Statement::DropConnector { if_exists, name }) - } - - /// ```sql - /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ] - /// ``` - fn parse_drop_domain(&mut self) -> Result { - let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_object_name(false)?; - let drop_behavior = self.parse_optional_drop_behavior(); - Ok(Statement::DropDomain(DropDomain { - if_exists, - name, - drop_behavior, - })) - } /// ```sql /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] 
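
A small sketch for the `DROP` dispatch above, using only the public parse entry point; the statements and the `GenericDialect` choice are illustrative. Object types recognized on only one side of this diff (for example `DROP CONNECTOR` or `DROP DOMAIN`) fall through to the "after DROP" error message on the other side:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // parse_drop dispatches on the keyword that follows DROP and then reads
    // the shared tail: IF EXISTS, the object name list, CASCADE/RESTRICT, PURGE.
    for sql in [
        "DROP TABLE IF EXISTS t1, t2 CASCADE",
        "DROP FUNCTION IF EXISTS my_func",
    ] {
        match Parser::parse_sql(&GenericDialect {}, sql) {
            Ok(stmts) => println!("ok:  {stmts:?}"),
            Err(err) => println!("err: {err}"),
        }
    }
}
```
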
@@ -6373,11 +5013,11 @@ impl<'a> Parser<'a> { fn parse_drop_procedure(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?; - let drop_behavior = self.parse_optional_drop_behavior(); + let option = self.parse_optional_referential_action(); Ok(Statement::DropProcedure { if_exists, proc_desc, - drop_behavior, + option, }) } @@ -6406,9 +5046,9 @@ impl<'a> Parser<'a> { persistent: bool, ) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let storage_specifier = if self.parse_keyword(Keyword::FROM) { - self.parse_identifier().ok() + self.parse_identifier(false).ok() } else { None }; @@ -6447,7 +5087,7 @@ impl<'a> Parser<'a> { return self.parse_mssql_declare(); } - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let binary = Some(self.parse_keyword(Keyword::BINARY)); let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) { @@ -6465,12 +5105,12 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword_is(Keyword::CURSOR)?; + self.expect_keyword(Keyword::CURSOR)?; let declare_type = Some(DeclareType::Cursor); let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) { Some(keyword) => { - self.expect_keyword_is(Keyword::HOLD)?; + self.expect_keyword(Keyword::HOLD)?; match keyword { Keyword::WITH => Some(true), @@ -6481,7 +5121,7 @@ impl<'a> Parser<'a> { None => None, }; - self.expect_keyword_is(Keyword::FOR)?; + self.expect_keyword(Keyword::FOR)?; let query = Some(self.parse_query()?); @@ -6508,7 +5148,7 @@ impl<'a> Parser<'a> { /// ``` /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare pub fn parse_big_query_declare(&mut self) -> Result { - let names = self.parse_comma_separated(Parser::parse_identifier)?; + let names = self.parse_comma_separated(|parser| Parser::parse_identifier(parser, false))?; let data_type = match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::DEFAULT => None, @@ -6524,7 +5164,7 @@ impl<'a> Parser<'a> { } else { // If no variable type - default expression must be specified, per BQ docs. // i.e `DECLARE foo;` is invalid. - self.expect_keyword_is(Keyword::DEFAULT)?; + self.expect_keyword(Keyword::DEFAULT)?; Some(self.parse_expr()?) 
}; @@ -6570,10 +5210,10 @@ impl<'a> Parser<'a> { pub fn parse_snowflake_declare(&mut self) -> Result { let mut stmts = vec![]; loop { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let (declare_type, for_query, assigned_expr, data_type) = if self.parse_keyword(Keyword::CURSOR) { - self.expect_keyword_is(Keyword::FOR)?; + self.expect_keyword(Keyword::FOR)?; match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::SELECT => ( Some(DeclareType::Cursor), @@ -6666,80 +5306,62 @@ impl<'a> Parser<'a> { /// DECLARE // { // { @local_variable [AS] data_type [ = value ] } - // | { @cursor_variable_name CURSOR [ FOR ] } + // | { @cursor_variable_name CURSOR } // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 pub fn parse_mssql_declare(&mut self) -> Result { - let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?; + let mut stmts = vec![]; + + loop { + let name = { + let ident = self.parse_identifier(false)?; + if !ident.value.starts_with('@') { + Err(ParserError::TokenizerError( + "Invalid MsSql variable declaration.".to_string(), + )) + } else { + Ok(ident) + } + }?; + + let (declare_type, data_type) = match self.peek_token().token { + Token::Word(w) => match w.keyword { + Keyword::CURSOR => { + self.next_token(); + (Some(DeclareType::Cursor), None) + } + Keyword::AS => { + self.next_token(); + (None, Some(self.parse_data_type()?)) + } + _ => (None, Some(self.parse_data_type()?)), + }, + _ => (None, Some(self.parse_data_type()?)), + }; + + let assignment = self.parse_mssql_variable_declaration_expression()?; + + stmts.push(Declare { + names: vec![name], + data_type, + assignment, + declare_type, + binary: None, + sensitive: None, + scroll: None, + hold: None, + for_query: None, + }); + + if self.next_token() != Token::Comma { + break; + } + } Ok(Statement::Declare { stmts }) } - /// Parse the body of a [MsSql] `DECLARE`statement. 
- /// - /// Syntax: - /// ```text - // { - // { @local_variable [AS] data_type [ = value ] } - // | { @cursor_variable_name CURSOR [ FOR ]} - // } [ ,...n ] - /// ``` - /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 - pub fn parse_mssql_declare_stmt(&mut self) -> Result { - let name = { - let ident = self.parse_identifier()?; - if !ident.value.starts_with('@') - && !matches!( - self.peek_token().token, - Token::Word(w) if w.keyword == Keyword::CURSOR - ) - { - Err(ParserError::TokenizerError( - "Invalid MsSql variable declaration.".to_string(), - )) - } else { - Ok(ident) - } - }?; - - let (declare_type, data_type) = match self.peek_token().token { - Token::Word(w) => match w.keyword { - Keyword::CURSOR => { - self.next_token(); - (Some(DeclareType::Cursor), None) - } - Keyword::AS => { - self.next_token(); - (None, Some(self.parse_data_type()?)) - } - _ => (None, Some(self.parse_data_type()?)), - }, - _ => (None, Some(self.parse_data_type()?)), - }; - - let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) { - self.next_token(); - let query = Some(self.parse_query()?); - (query, None) - } else { - let assignment = self.parse_mssql_variable_declaration_expression()?; - (None, assignment) - }; - - Ok(Declare { - names: vec![name], - data_type, - assignment, - declare_type, - binary: None, - sensitive: None, - scroll: None, - hold: None, - for_query, - }) - } - /// Parses the assigned expression in a variable declaration. /// /// Syntax: @@ -6797,11 +5419,11 @@ impl<'a> Parser<'a> { FetchDirection::Last } else if self.parse_keyword(Keyword::ABSOLUTE) { FetchDirection::Absolute { - limit: self.parse_number_value()?.value, + limit: self.parse_number_value()?, } } else if self.parse_keyword(Keyword::RELATIVE) { FetchDirection::Relative { - limit: self.parse_number_value()?.value, + limit: self.parse_number_value()?, } } else if self.parse_keyword(Keyword::FORWARD) { if self.parse_keyword(Keyword::ALL) { @@ -6809,7 +5431,7 @@ impl<'a> Parser<'a> { } else { FetchDirection::Forward { // TODO: Support optional - limit: Some(self.parse_number_value()?.value), + limit: Some(self.parse_number_value()?), } } } else if self.parse_keyword(Keyword::BACKWARD) { @@ -6818,28 +5440,20 @@ impl<'a> Parser<'a> { } else { FetchDirection::Backward { // TODO: Support optional - limit: Some(self.parse_number_value()?.value), + limit: Some(self.parse_number_value()?), } } } else if self.parse_keyword(Keyword::ALL) { FetchDirection::All } else { FetchDirection::Count { - limit: self.parse_number_value()?.value, + limit: self.parse_number_value()?, } }; - let position = if self.peek_keyword(Keyword::FROM) { - self.expect_keyword(Keyword::FROM)?; - FetchPosition::From - } else if self.peek_keyword(Keyword::IN) { - self.expect_keyword(Keyword::IN)?; - FetchPosition::In - } else { - return parser_err!("Expected FROM or IN", self.peek_token().span.start); - }; + self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?; - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let into = if self.parse_keyword(Keyword::INTO) { Some(self.parse_object_name(false)?) 
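
The two sides of this diff differ in how T-SQL `DECLARE` lists are consumed (a comma-separated helper vs. an explicit loop). A hedged sketch of input that should exercise both branches either way; the variable names, values, and dialect are made up for illustration:

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each comma-separated item becomes one Declare entry: `@total INT = 7`
    // takes the data-type + assignment branch, `@cur CURSOR` takes the
    // DeclareType::Cursor branch.
    let sql = "DECLARE @total INT = 7, @cur CURSOR";
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap();
    println!("{statements:?}");
}
```
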
@@ -6850,7 +5464,6 @@ impl<'a> Parser<'a> { Ok(Statement::Fetch { name, direction, - position, into, }) } @@ -6878,23 +5491,24 @@ impl<'a> Parser<'a> { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) { let index_name = self.parse_object_name(false)?; - self.expect_keyword_is(Keyword::ON)?; + self.expect_keyword(Keyword::ON)?; Some(index_name) } else { None }; let table_name = self.parse_object_name(false)?; let using = if self.parse_keyword(Keyword::USING) { - Some(self.parse_index_type()?) + Some(self.parse_identifier(false)?) } else { None }; - - let columns = self.parse_parenthesized_index_column_list()?; + self.expect_token(&Token::LParen)?; + let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?; + self.expect_token(&Token::RParen)?; let include = if self.parse_keyword(Keyword::INCLUDE) { self.expect_token(&Token::LParen)?; - let columns = self.parse_comma_separated(|p| p.parse_identifier())?; + let columns = self.parse_comma_separated(|p| p.parse_identifier(false))?; self.expect_token(&Token::RParen)?; columns } else { @@ -6903,7 +5517,7 @@ impl<'a> Parser<'a> { let nulls_distinct = if self.parse_keyword(Keyword::NULLS) { let not = self.parse_keyword(Keyword::NOT); - self.expect_keyword_is(Keyword::DISTINCT)?; + self.expect_keyword(Keyword::DISTINCT)?; Some(!not) } else { None @@ -6943,17 +5557,17 @@ impl<'a> Parser<'a> { pub fn parse_create_extension(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) { let schema = if self.parse_keyword(Keyword::SCHEMA) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; let version = if self.parse_keyword(Keyword::VERSION) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; @@ -6974,25 +5588,6 @@ impl<'a> Parser<'a> { }) } - /// Parse a PostgreSQL-specific [Statement::DropExtension] statement. 
- pub fn parse_drop_extension(&mut self) -> Result { - let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let names = self.parse_comma_separated(|p| p.parse_identifier())?; - let cascade_or_restrict = - self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]); - Ok(Statement::DropExtension { - names, - if_exists, - cascade_or_restrict: cascade_or_restrict - .map(|k| match k { - Keyword::CASCADE => Ok(ReferentialAction::Cascade), - Keyword::RESTRICT => Ok(ReferentialAction::Restrict), - _ => self.expected("CASCADE or RESTRICT", self.peek_token()), - }) - .transpose()?, - }) - } - //TODO: Implement parsing for Skewed pub fn parse_hive_distribution(&mut self) -> Result { if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) { @@ -7018,10 +5613,10 @@ impl<'a> Parser<'a> { hive_format.row_format = Some(self.parse_row_format()?); } Some(Keyword::STORED) => { - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; if self.parse_keyword(Keyword::INPUTFORMAT) { let input_format = self.parse_expr()?; - self.expect_keyword_is(Keyword::OUTPUTFORMAT)?; + self.expect_keyword(Keyword::OUTPUTFORMAT)?; let output_format = self.parse_expr()?; hive_format.storage = Some(HiveIOFormat::IOF { input_format, @@ -7054,7 +5649,7 @@ impl<'a> Parser<'a> { } pub fn parse_row_format(&mut self) -> Result { - self.expect_keyword_is(Keyword::FORMAT)?; + self.expect_keyword(Keyword::FORMAT)?; match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) { Some(Keyword::SERDE) => { let class = self.parse_literal_string()?; @@ -7075,13 +5670,13 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::FieldsTerminatedBy, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::FieldsEscapedBy, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); } } else { @@ -7096,7 +5691,7 @@ impl<'a> Parser<'a> { ]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::CollectionItemsTerminatedBy, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); } else { break; @@ -7110,7 +5705,7 @@ impl<'a> Parser<'a> { ]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::MapKeysTerminatedBy, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); } else { break; @@ -7120,7 +5715,7 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::LinesTerminatedBy, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); } else { break; @@ -7130,7 +5725,7 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::NullDefinedAs, - char: self.parse_identifier()?, + char: self.parse_identifier(false)?, }); } else { break; @@ -7151,7 +5746,7 @@ impl<'a> Parser<'a> { fn parse_optional_on_cluster(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) { - Ok(Some(self.parse_identifier()?)) + Ok(Some(self.parse_identifier(false)?)) } else { Ok(None) } @@ -7185,16 +5780,17 @@ impl<'a> Parser<'a> { // parse optional column list (schema) let (columns, constraints) = self.parse_columns()?; - let comment_after_column_def = - if 
dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)), - _ => self.expected("comment", next_token)?, - } - } else { - None - }; + let mut comment = if dialect_of!(self is HiveDialect) + && self.parse_keyword(Keyword::COMMENT) + { + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)), + _ => self.expected("comment", next_token)?, + } + } else { + None + }; // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE` let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]); @@ -7202,8 +5798,39 @@ impl<'a> Parser<'a> { let hive_distribution = self.parse_hive_distribution()?; let clustered_by = self.parse_optional_clustered_by()?; let hive_formats = self.parse_hive_formats()?; + // PostgreSQL supports `WITH ( options )`, before `AS` + let with_options = self.parse_options(Keyword::WITH)?; + let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; - let create_table_config = self.parse_optional_create_table_config()?; + let engine = if self.parse_keyword(Keyword::ENGINE) { + self.expect_token(&Token::Eq)?; + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => { + let name = w.value; + let parameters = if self.peek_token() == Token::LParen { + Some(self.parse_parenthesized_identifiers()?) + } else { + None + }; + Some(TableEngine { name, parameters }) + } + _ => self.expected("identifier", next_token)?, + } + } else { + None + }; + + let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) { + let _ = self.consume_token(&Token::Eq); + let next_token = self.next_token(); + match next_token.token { + Token::Number(s, _) => Some(Self::parse::(s, next_token.location)?), + _ => self.expected("literal int", next_token)?, + } + } else { + None + }; // ClickHouse supports `PRIMARY KEY`, before `ORDER BY` // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key @@ -7231,22 +5858,59 @@ impl<'a> Parser<'a> { None }; - let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) { - Some(self.parse_create_table_on_commit()?) + let create_table_config = self.parse_optional_create_table_config()?; + + let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { + self.expect_token(&Token::Eq)?; + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => Some(w.value), + _ => self.expected("identifier", next_token)?, + } } else { None }; + let collation = if self.parse_keywords(&[Keyword::COLLATE]) { + self.expect_token(&Token::Eq)?; + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => Some(w.value), + _ => self.expected("identifier", next_token)?, + } + } else { + None + }; + + let on_commit: Option = + if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DELETE, Keyword::ROWS]) + { + Some(OnCommit::DeleteRows) + } else if self.parse_keywords(&[ + Keyword::ON, + Keyword::COMMIT, + Keyword::PRESERVE, + Keyword::ROWS, + ]) { + Some(OnCommit::PreserveRows) + } else if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DROP]) { + Some(OnCommit::Drop) + } else { + None + }; + let strict = self.parse_keyword(Keyword::STRICT); + // Excludes Hive dialect here since it has been handled after table column definitions. 
+ if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { + // rewind the COMMENT keyword + self.prev_token(); + comment = self.parse_optional_inline_comment()? + }; + // Parse optional `AS ( query )` let query = if self.parse_keyword(Keyword::AS) { Some(self.parse_query()?) - } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT) - { - // rewind the SELECT keyword - self.prev_token(); - Some(self.parse_query()?) } else { None }; @@ -7255,6 +5919,8 @@ impl<'a> Parser<'a> { .temporary(temporary) .columns(columns) .constraints(constraints) + .with_options(with_options) + .table_properties(table_properties) .or_replace(or_replace) .if_not_exists(if_not_exists) .transient(transient) @@ -7265,61 +5931,30 @@ impl<'a> Parser<'a> { .without_rowid(without_rowid) .like(like) .clone_clause(clone) - .comment_after_column_def(comment_after_column_def) + .engine(engine) + .comment(comment) + .auto_increment_offset(auto_increment_offset) .order_by(order_by) + .default_charset(default_charset) + .collation(collation) .on_commit(on_commit) .on_cluster(on_cluster) .clustered_by(clustered_by) .partition_by(create_table_config.partition_by) .cluster_by(create_table_config.cluster_by) - .inherits(create_table_config.inherits) - .table_options(create_table_config.table_options) + .options(create_table_config.options) .primary_key(primary_key) .strict(strict) .build()) } - pub(crate) fn parse_create_table_on_commit(&mut self) -> Result { - if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) { - Ok(OnCommit::DeleteRows) - } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) { - Ok(OnCommit::PreserveRows) - } else if self.parse_keywords(&[Keyword::DROP]) { - Ok(OnCommit::Drop) - } else { - parser_err!( - "Expecting DELETE ROWS, PRESERVE ROWS or DROP", - self.peek_token() - ) - } - } - - /// Parse configuration like inheritance, partitioning, clustering information during the table creation. + /// Parse configuration like partitioning, clustering information during the table creation. /// /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2) /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html) - /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html) fn parse_optional_create_table_config( &mut self, ) -> Result { - let mut table_options = CreateTableOptions::None; - - let inherits = if self.parse_keyword(Keyword::INHERITS) { - Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?) 
- } else { - None - }; - - // PostgreSQL supports `WITH ( options )`, before `AS` - let with_options = self.parse_options(Keyword::WITH)?; - if !with_options.is_empty() { - table_options = CreateTableOptions::With(with_options) - } - - let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; - if !table_properties.is_empty() { - table_options = CreateTableOptions::TableProperties(table_properties); - } let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect) && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { @@ -7329,264 +5964,46 @@ impl<'a> Parser<'a> { }; let mut cluster_by = None; + let mut options = None; if dialect_of!(self is BigQueryDialect | GenericDialect) { if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { cluster_by = Some(WrappedCollection::NoWrapping( - self.parse_comma_separated(|p| p.parse_expr())?, + self.parse_comma_separated(|p| p.parse_identifier(false))?, )); }; if let Token::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { - table_options = - CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?) + options = Some(self.parse_options(Keyword::OPTIONS)?); } }; } - if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None { - let plain_options = self.parse_plain_options()?; - if !plain_options.is_empty() { - table_options = CreateTableOptions::Plain(plain_options) - } - }; - Ok(CreateTableConfiguration { partition_by, cluster_by, - inherits, - table_options, + options, }) } - fn parse_plain_option(&mut self) -> Result, ParserError> { - // Single parameter option - // - if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) { - return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION")))); - } - - // Custom option - // - if self.parse_keywords(&[Keyword::COMMENT]) { - let has_eq = self.consume_token(&Token::Eq); - let value = self.next_token(); - - let comment = match (has_eq, value.token) { - (true, Token::SingleQuotedString(s)) => { - Ok(Some(SqlOption::Comment(CommentDef::WithEq(s)))) - } - (false, Token::SingleQuotedString(s)) => { - Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s)))) - } - (_, token) => { - self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token)) - } - }; - return comment; - } - - // - // - if self.parse_keywords(&[Keyword::ENGINE]) { - let _ = self.consume_token(&Token::Eq); - let value = self.next_token(); - - let engine = match value.token { - Token::Word(w) => { - let parameters = if self.peek_token() == Token::LParen { - self.parse_parenthesized_identifiers()? - } else { - vec![] - }; - - Ok(Some(SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("ENGINE"), - name: Some(Ident::new(w.value)), - values: parameters, - }, - ))) - } - _ => { - return self.expected("Token::Word", value)?; - } - }; - - return engine; - } - - // - if self.parse_keywords(&[Keyword::TABLESPACE]) { - let _ = self.consume_token(&Token::Eq); - let value = self.next_token(); - - let tablespace = match value.token { - Token::Word(Word { value: name, .. 
}) | Token::SingleQuotedString(name) => { - let storage = match self.parse_keyword(Keyword::STORAGE) { - true => { - let _ = self.consume_token(&Token::Eq); - let storage_token = self.next_token(); - match &storage_token.token { - Token::Word(w) => match w.value.to_uppercase().as_str() { - "DISK" => Some(StorageType::Disk), - "MEMORY" => Some(StorageType::Memory), - _ => self - .expected("Storage type (DISK or MEMORY)", storage_token)?, - }, - _ => self.expected("Token::Word", storage_token)?, - } - } - false => None, - }; - - Ok(Some(SqlOption::TableSpace(TablespaceOption { - name, - storage, - }))) - } - _ => { - return self.expected("Token::Word", value)?; - } - }; - - return tablespace; - } - - // - if self.parse_keyword(Keyword::UNION) { - let _ = self.consume_token(&Token::Eq); - let value = self.next_token(); - - match value.token { - Token::LParen => { - let tables: Vec = - self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?; - self.expect_token(&Token::RParen)?; - - return Ok(Some(SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("UNION"), - name: None, - values: tables, - }, - ))); - } - _ => { - return self.expected("Token::LParen", value)?; - } - } - } - - // Key/Value parameter option - let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { - Ident::new("DEFAULT CHARSET") - } else if self.parse_keyword(Keyword::CHARSET) { - Ident::new("CHARSET") - } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) { - Ident::new("DEFAULT CHARACTER SET") - } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { - Ident::new("CHARACTER SET") - } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) { - Ident::new("DEFAULT COLLATE") - } else if self.parse_keyword(Keyword::COLLATE) { - Ident::new("COLLATE") - } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) { - Ident::new("DATA DIRECTORY") - } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) { - Ident::new("INDEX DIRECTORY") - } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) { - Ident::new("KEY_BLOCK_SIZE") - } else if self.parse_keyword(Keyword::ROW_FORMAT) { - Ident::new("ROW_FORMAT") - } else if self.parse_keyword(Keyword::PACK_KEYS) { - Ident::new("PACK_KEYS") - } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) { - Ident::new("STATS_AUTO_RECALC") - } else if self.parse_keyword(Keyword::STATS_PERSISTENT) { - Ident::new("STATS_PERSISTENT") - } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) { - Ident::new("STATS_SAMPLE_PAGES") - } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) { - Ident::new("DELAY_KEY_WRITE") - } else if self.parse_keyword(Keyword::COMPRESSION) { - Ident::new("COMPRESSION") - } else if self.parse_keyword(Keyword::ENCRYPTION) { - Ident::new("ENCRYPTION") - } else if self.parse_keyword(Keyword::MAX_ROWS) { - Ident::new("MAX_ROWS") - } else if self.parse_keyword(Keyword::MIN_ROWS) { - Ident::new("MIN_ROWS") - } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) { - Ident::new("AUTOEXTEND_SIZE") - } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) { - Ident::new("AVG_ROW_LENGTH") - } else if self.parse_keyword(Keyword::CHECKSUM) { - Ident::new("CHECKSUM") - } else if self.parse_keyword(Keyword::CONNECTION) { - Ident::new("CONNECTION") - } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) { - Ident::new("ENGINE_ATTRIBUTE") - } else if self.parse_keyword(Keyword::PASSWORD) { - Ident::new("PASSWORD") - } else if 
self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) { - Ident::new("SECONDARY_ENGINE_ATTRIBUTE") - } else if self.parse_keyword(Keyword::INSERT_METHOD) { - Ident::new("INSERT_METHOD") - } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { - Ident::new("AUTO_INCREMENT") - } else { - return Ok(None); - }; - - let _ = self.consume_token(&Token::Eq); - - let value = match self - .maybe_parse(|parser| parser.parse_value())? - .map(Expr::Value) - { - Some(expr) => expr, - None => Expr::Identifier(self.parse_identifier()?), - }; - - Ok(Some(SqlOption::KeyValue { key, value })) - } - - pub fn parse_plain_options(&mut self) -> Result, ParserError> { - let mut options = Vec::new(); - - while let Some(option) = self.parse_plain_option()? { - options.push(option); - } - - Ok(options) - } - pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { let comment = if self.parse_keyword(Keyword::COMMENT) { let has_eq = self.consume_token(&Token::Eq); - let comment = self.parse_comment_value()?; - Some(if has_eq { - CommentDef::WithEq(comment) - } else { - CommentDef::WithoutEq(comment) - }) + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(str) => Some(if has_eq { + CommentDef::WithEq(str) + } else { + CommentDef::WithoutEq(str) + }), + _ => self.expected("comment", next_token)?, + } } else { None }; Ok(comment) } - pub fn parse_comment_value(&mut self) -> Result { - let next_token = self.next_token(); - let value = match next_token.token { - Token::SingleQuotedString(str) => str, - Token::DollarQuotedString(str) => str.value, - _ => self.expected("string literal", next_token)?, - }; - Ok(value) - } - pub fn parse_optional_procedure_parameters( &mut self, ) -> Result>, ParserError> { @@ -7632,11 +6049,7 @@ impl<'a> Parser<'a> { return self.expected("',' or ')' after column definition", self.peek_token()); }; - if rparen - && (!comma - || self.dialect.supports_column_definition_trailing_commas() - || self.options.trailing_commas) - { + if rparen && (!comma || self.options.trailing_commas) { let _ = self.consume_token(&Token::RParen); break; } @@ -7646,35 +6059,27 @@ impl<'a> Parser<'a> { } pub fn parse_procedure_param(&mut self) -> Result { - let mode = if self.parse_keyword(Keyword::IN) { - Some(ArgMode::In) - } else if self.parse_keyword(Keyword::OUT) { - Some(ArgMode::Out) - } else if self.parse_keyword(Keyword::INOUT) { - Some(ArgMode::InOut) - } else { - None - }; - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let data_type = self.parse_data_type()?; - Ok(ProcedureParam { - name, - data_type, - mode, - }) + Ok(ProcedureParam { name, data_type }) } pub fn parse_column_def(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let data_type = if self.is_column_type_sqlite_unspecified() { DataType::Unspecified } else { self.parse_data_type()? }; + let mut collation = if self.parse_keyword(Keyword::COLLATE) { + Some(self.parse_object_name(false)?) + } else { + None + }; let mut options = vec![]; loop { if self.parse_keyword(Keyword::CONSTRAINT) { - let name = Some(self.parse_identifier()?); + let name = Some(self.parse_identifier(false)?); if let Some(option) = self.parse_optional_column_option()? { options.push(ColumnOptionDef { name, option }); } else { @@ -7685,6 +6090,10 @@ impl<'a> Parser<'a> { } } else if let Some(option) = self.parse_optional_column_option()? 
{ options.push(ColumnOptionDef { name: None, option }); + } else if dialect_of!(self is MySqlDialect | SnowflakeDialect | GenericDialect) + && self.parse_keyword(Keyword::COLLATE) + { + collation = Some(self.parse_object_name(false)?); } else { break; }; @@ -7692,6 +6101,7 @@ impl<'a> Parser<'a> { Ok(ColumnDef { name, data_type, + collation, options, }) } @@ -7728,14 +6138,14 @@ impl<'a> Parser<'a> { Ok(Some(ColumnOption::CharacterSet( self.parse_object_name(false)?, ))) - } else if self.parse_keywords(&[Keyword::COLLATE]) { - Ok(Some(ColumnOption::Collation( - self.parse_object_name(false)?, - ))) } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) { Ok(Some(ColumnOption::NotNull)) } else if self.parse_keywords(&[Keyword::COMMENT]) { - Ok(Some(ColumnOption::Comment(self.parse_comment_value()?))) + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))), + _ => self.expected("string", next_token), + } } else if self.parse_keyword(Keyword::NULL) { Ok(Some(ColumnOption::Null)) } else if self.parse_keyword(Keyword::DEFAULT) { @@ -7848,10 +6258,6 @@ impl<'a> Parser<'a> { && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect) { self.parse_optional_column_option_as() - } else if self.parse_keyword(Keyword::SRID) - && dialect_of!(self is MySqlDialect | GenericDialect) - { - Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?)))) } else if self.parse_keyword(Keyword::IDENTITY) && dialect_of!(self is MsSqlDialect | GenericDialect) { @@ -7892,7 +6298,7 @@ impl<'a> Parser<'a> { } pub(crate) fn parse_tag(&mut self) -> Result { - let name = self.parse_object_name(false)?; + let name = self.parse_identifier(false)?; self.expect_token(&Token::Eq)?; let value = self.parse_literal_string()?; @@ -8007,9 +6413,9 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword_is(Keyword::INTO)?; - let num_buckets = self.parse_number_value()?.value; - self.expect_keyword_is(Keyword::BUCKETS)?; + self.expect_keyword(Keyword::INTO)?; + let num_buckets = self.parse_number_value()?; + self.expect_keyword(Keyword::BUCKETS)?; Some(ClusteredBy { columns, sorted_by, @@ -8081,7 +6487,7 @@ impl<'a> Parser<'a> { &mut self, ) -> Result, ParserError> { let name = if self.parse_keyword(Keyword::CONSTRAINT) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) 
} else { None }; @@ -8097,13 +6503,11 @@ impl<'a> Parser<'a> { .expected("`index_name` or `(column_name [, ...])`", self.peek_token()); } - let nulls_distinct = self.parse_optional_nulls_distinct()?; - // optional index name - let index_name = self.parse_optional_ident()?; + let index_name = self.parse_optional_indent()?; let index_type = self.parse_optional_using_then_index_type()?; - let columns = self.parse_parenthesized_index_column_list()?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let index_options = self.parse_index_options()?; let characteristics = self.parse_constraint_characteristics()?; Ok(Some(TableConstraint::Unique { @@ -8114,18 +6518,17 @@ impl<'a> Parser<'a> { columns, index_options, characteristics, - nulls_distinct, })) } Token::Word(w) if w.keyword == Keyword::PRIMARY => { // after `PRIMARY` always stay `KEY` - self.expect_keyword_is(Keyword::KEY)?; + self.expect_keyword(Keyword::KEY)?; // optional index name - let index_name = self.parse_optional_ident()?; + let index_name = self.parse_optional_indent()?; let index_type = self.parse_optional_using_then_index_type()?; - let columns = self.parse_parenthesized_index_column_list()?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; let index_options = self.parse_index_options()?; let characteristics = self.parse_constraint_characteristics()?; Ok(Some(TableConstraint::PrimaryKey { @@ -8138,12 +6541,11 @@ impl<'a> Parser<'a> { })) } Token::Word(w) if w.keyword == Keyword::FOREIGN => { - self.expect_keyword_is(Keyword::KEY)?; - let index_name = self.parse_optional_ident()?; + self.expect_keyword(Keyword::KEY)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?; - self.expect_keyword_is(Keyword::REFERENCES)?; + self.expect_keyword(Keyword::REFERENCES)?; let foreign_table = self.parse_object_name(false)?; - let referred_columns = self.parse_parenthesized_column_list(Optional, false)?; + let referred_columns = self.parse_parenthesized_column_list(Mandatory, false)?; let mut on_delete = None; let mut on_update = None; loop { @@ -8162,7 +6564,6 @@ impl<'a> Parser<'a> { Ok(Some(TableConstraint::ForeignKey { name, - index_name, columns, foreign_table, referred_columns, @@ -8175,20 +6576,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let expr = Box::new(self.parse_expr()?); self.expect_token(&Token::RParen)?; - - let enforced = if self.parse_keyword(Keyword::ENFORCED) { - Some(true) - } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) { - Some(false) - } else { - None - }; - - Ok(Some(TableConstraint::Check { - name, - expr, - enforced, - })) + Ok(Some(TableConstraint::Check { name, expr })) } Token::Word(w) if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY) @@ -8199,11 +6587,11 @@ impl<'a> Parser<'a> { let name = match self.peek_token().token { Token::Word(word) if word.keyword == Keyword::USING => None, - _ => self.parse_optional_ident()?, + _ => self.parse_optional_indent()?, }; let index_type = self.parse_optional_using_then_index_type()?; - let columns = self.parse_parenthesized_index_column_list()?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; Ok(Some(TableConstraint::Index { display_as_key, @@ -8219,9 +6607,9 @@ impl<'a> Parser<'a> { if let Some(name) = name { return self.expected( "FULLTEXT or SPATIAL option without constraint name", - TokenWithSpan { + TokenWithLocation { token: Token::make_keyword(&name.to_string()), - span: next_token.span, + location: next_token.location, }, ); } 
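
A sketch of input that drives the table-constraint branches above (PRIMARY KEY, FOREIGN KEY, CHECK, UNIQUE). The table layout is invented for illustration and should parse on either side of this diff, since the `REFERENCES` column list is spelled out explicitly (one side requires it, the other makes it optional):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // `CONSTRAINT <name> ...` enters parse_optional_table_constraint with a
    // name; the bare UNIQUE and the column-level CHECK are parsed unnamed.
    let sql = "CREATE TABLE orders ( \
                   id INT PRIMARY KEY, \
                   customer_id INT NOT NULL, \
                   total NUMERIC CHECK (total >= 0), \
                   CONSTRAINT fk_customer FOREIGN KEY (customer_id) \
                       REFERENCES customers(id) ON DELETE CASCADE, \
                   UNIQUE (customer_id, id) \
               )";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    println!("{statements:?}");
}
```
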
@@ -8230,9 +6618,9 @@ impl<'a> Parser<'a> { let index_type_display = self.parse_index_type_display(); - let opt_index_name = self.parse_optional_ident()?; + let opt_index_name = self.parse_optional_indent()?; - let columns = self.parse_parenthesized_index_column_list()?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; Ok(Some(TableConstraint::FulltextOrSpatial { fulltext, @@ -8252,20 +6640,6 @@ impl<'a> Parser<'a> { } } - fn parse_optional_nulls_distinct(&mut self) -> Result { - Ok(if self.parse_keyword(Keyword::NULLS) { - let not = self.parse_keyword(Keyword::NOT); - self.expect_keyword_is(Keyword::DISTINCT)?; - if not { - NullsDistinctOption::NotDistinct - } else { - NullsDistinctOption::Distinct - } - } else { - NullsDistinctOption::None - }) - } - pub fn maybe_parse_options( &mut self, keyword: Keyword, @@ -8281,7 +6655,7 @@ impl<'a> Parser<'a> { pub fn parse_options(&mut self, keyword: Keyword) -> Result, ParserError> { if self.parse_keyword(keyword) { self.expect_token(&Token::LParen)?; - let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?; + let options = self.parse_comma_separated(Parser::parse_sql_option)?; self.expect_token(&Token::RParen)?; Ok(options) } else { @@ -8304,30 +6678,16 @@ impl<'a> Parser<'a> { } pub fn parse_index_type(&mut self) -> Result { - Ok(if self.parse_keyword(Keyword::BTREE) { - IndexType::BTree + if self.parse_keyword(Keyword::BTREE) { + Ok(IndexType::BTree) } else if self.parse_keyword(Keyword::HASH) { - IndexType::Hash - } else if self.parse_keyword(Keyword::GIN) { - IndexType::GIN - } else if self.parse_keyword(Keyword::GIST) { - IndexType::GiST - } else if self.parse_keyword(Keyword::SPGIST) { - IndexType::SPGiST - } else if self.parse_keyword(Keyword::BRIN) { - IndexType::BRIN - } else if self.parse_keyword(Keyword::BLOOM) { - IndexType::Bloom + Ok(IndexType::Hash) } else { - IndexType::Custom(self.parse_identifier()?) - }) + self.expected("index type {BTREE | HASH}", self.peek_token()) + } } - /// Optionally parse the `USING` keyword, followed by an [IndexType] - /// Example: - /// ```sql - //// USING BTREE (name, age DESC) - /// ``` + /// Parse [USING {BTREE | HASH}] pub fn parse_optional_using_then_index_type( &mut self, ) -> Result, ParserError> { @@ -8340,8 +6700,8 @@ impl<'a> Parser<'a> { /// Parse `[ident]`, mostly `ident` is name, like: /// `window_name`, `index_name`, ... 
- pub fn parse_optional_ident(&mut self) -> Result, ParserError> { - self.maybe_parse(|parser| parser.parse_identifier()) + pub fn parse_optional_indent(&mut self) -> Result, ParserError> { + self.maybe_parse(|parser| parser.parse_identifier(false)) } #[must_use] @@ -8382,7 +6742,7 @@ impl<'a> Parser<'a> { match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => { - Ok(SqlOption::Ident(self.parse_identifier()?)) + Ok(SqlOption::Ident(self.parse_identifier(false)?)) } Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => { self.parse_option_partition() @@ -8391,7 +6751,7 @@ impl<'a> Parser<'a> { self.parse_option_clustered() } _ => { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; self.expect_token(&Token::Eq)?; let value = self.parse_expr()?; @@ -8420,7 +6780,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(|p| { - let name = p.parse_identifier()?; + let name = p.parse_identifier(false)?; let asc = p.parse_asc_desc(); Ok(ClusteredIndex { name, asc }) @@ -8437,11 +6797,11 @@ impl<'a> Parser<'a> { } pub fn parse_option_partition(&mut self) -> Result { - self.expect_keyword_is(Keyword::PARTITION)?; + self.expect_keyword(Keyword::PARTITION)?; self.expect_token(&Token::LParen)?; - let column_name = self.parse_identifier()?; + let column_name = self.parse_identifier(false)?; - self.expect_keyword_is(Keyword::RANGE)?; + self.expect_keyword(Keyword::RANGE)?; let range_direction = if self.parse_keyword(Keyword::LEFT) { Some(PartitionRangeDirection::Left) } else if self.parse_keyword(Keyword::RIGHT) { @@ -8474,7 +6834,7 @@ impl<'a> Parser<'a> { pub fn parse_projection_select(&mut self) -> Result { self.expect_token(&Token::LParen)?; - self.expect_keyword_is(Keyword::SELECT)?; + self.expect_keyword(Keyword::SELECT)?; let projection = self.parse_projection()?; let group_by = self.parse_optional_group_by()?; let order_by = self.parse_optional_order_by()?; @@ -8487,7 +6847,7 @@ impl<'a> Parser<'a> { } pub fn parse_alter_table_add_projection(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let query = self.parse_projection_select()?; Ok(AlterTableOperation::AddProjection { if_not_exists, @@ -8499,11 +6859,7 @@ impl<'a> Parser<'a> { pub fn parse_alter_table_operation(&mut self) -> Result { let operation = if self.parse_keyword(Keyword::ADD) { if let Some(constraint) = self.parse_optional_table_constraint()? 
{ - let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]); - AlterTableOperation::AddConstraint { - constraint, - not_valid, - } + AlterTableOperation::AddConstraint(constraint) } else if dialect_of!(self is ClickHouseDialect|GenericDialect) && self.parse_keyword(Keyword::PROJECTION) { @@ -8549,18 +6905,18 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::RENAME) { if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) { - let old_name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::TO)?; - let new_name = self.parse_identifier()?; + let old_name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::TO)?; + let new_name = self.parse_identifier(false)?; AlterTableOperation::RenameConstraint { old_name, new_name } } else if self.parse_keyword(Keyword::TO) { let table_name = self.parse_object_name(false)?; AlterTableOperation::RenameTable { table_name } } else { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let old_column_name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::TO)?; - let new_column_name = self.parse_identifier()?; + let old_column_name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::TO)?; + let new_column_name = self.parse_identifier(false)?; AlterTableOperation::RenameColumn { old_column_name, new_column_name, @@ -8570,10 +6926,10 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) { AlterTableOperation::DisableRowLevelSecurity {} } else if self.parse_keyword(Keyword::RULE) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::DisableRule { name } } else if self.parse_keyword(Keyword::TRIGGER) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::DisableTrigger { name } } else { return self.expected( @@ -8583,24 +6939,24 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::ENABLE) { if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableAlwaysRule { name } } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableAlwaysTrigger { name } } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) { AlterTableOperation::EnableRowLevelSecurity {} } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableReplicaRule { name } } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableReplicaTrigger { name } } else if self.parse_keyword(Keyword::RULE) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableRule { name } } else if self.parse_keyword(Keyword::TRIGGER) { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::EnableTrigger { name } } else { return self.expected( @@ -8612,9 +6968,9 @@ impl<'a> Parser<'a> { && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = 
self.parse_identifier(false)?; let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; @@ -8627,9 +6983,9 @@ impl<'a> Parser<'a> { && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; @@ -8657,50 +7013,39 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::CONSTRAINT) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; - let drop_behavior = self.parse_optional_drop_behavior(); + let name = self.parse_identifier(false)?; + let cascade = self.parse_keyword(Keyword::CASCADE); AlterTableOperation::DropConstraint { if_exists, name, - drop_behavior, + cascade, } - } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) { + } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) + && dialect_of!(self is MySqlDialect | GenericDialect) + { AlterTableOperation::DropPrimaryKey - } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) { - let name = self.parse_identifier()?; - AlterTableOperation::DropForeignKey { name } - } else if self.parse_keyword(Keyword::INDEX) { - let name = self.parse_identifier()?; - AlterTableOperation::DropIndex { name } } else if self.parse_keyword(Keyword::PROJECTION) && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; AlterTableOperation::DropProjection { if_exists, name } - } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) { - AlterTableOperation::DropClusteringKey } else { - let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] + let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let column_names = if self.dialect.supports_comma_separated_drop_column_list() { - self.parse_comma_separated(Parser::parse_identifier)? - } else { - vec![self.parse_identifier()?] 
- }; - let drop_behavior = self.parse_optional_drop_behavior(); + let column_name = self.parse_identifier(false)?; + let cascade = self.parse_keyword(Keyword::CASCADE); AlterTableOperation::DropColumn { - has_column_keyword, - column_names, + column_name, if_exists, - drop_behavior, + cascade, } } } else if self.parse_keyword(Keyword::PARTITION) { self.expect_token(&Token::LParen)?; let before = self.parse_comma_separated(Parser::parse_expr)?; self.expect_token(&Token::RParen)?; - self.expect_keyword_is(Keyword::RENAME)?; + self.expect_keyword(Keyword::RENAME)?; self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?; self.expect_token(&Token::LParen)?; let renames = self.parse_comma_separated(Parser::parse_expr)?; @@ -8711,8 +7056,8 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::CHANGE) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let old_name = self.parse_identifier()?; - let new_name = self.parse_identifier()?; + let old_name = self.parse_identifier(false)?; + let new_name = self.parse_identifier(false)?; let data_type = self.parse_data_type()?; let mut options = vec![]; while let Some(option) = self.parse_optional_column_option()? { @@ -8730,7 +7075,7 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::MODIFY) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let col_name = self.parse_identifier()?; + let col_name = self.parse_identifier(false)?; let data_type = self.parse_data_type()?; let mut options = vec![]; while let Some(option) = self.parse_optional_column_option()? { @@ -8747,7 +7092,7 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::ALTER) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let column_name = self.parse_identifier()?; + let column_name = self.parse_identifier(false)?; let is_postgresql = dialect_of!(self is PostgreSqlDialect); let op: AlterColumnOperation = if self.parse_keywords(&[ @@ -8764,10 +7109,16 @@ impl<'a> Parser<'a> { } } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) { AlterColumnOperation::DropDefault {} - } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) { - self.parse_set_data_type(true)? - } else if self.parse_keyword(Keyword::TYPE) { - self.parse_set_data_type(false)? + } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) + || (is_postgresql && self.parse_keyword(Keyword::TYPE)) + { + let data_type = self.parse_data_type()?; + let using = if is_postgresql && self.parse_keyword(Keyword::USING) { + Some(self.parse_expr()?) + } else { + None + }; + AlterColumnOperation::SetDataType { data_type, using } } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) { let generated_as = if self.parse_keyword(Keyword::ALWAYS) { Some(GeneratedAs::Always) @@ -8802,7 +7153,7 @@ impl<'a> Parser<'a> { }; AlterTableOperation::AlterColumn { column_name, op } } else if self.parse_keyword(Keyword::SWAP) { - self.expect_keyword_is(Keyword::WITH)?; + self.expect_keyword(Keyword::WITH)?; let table_name = self.parse_object_name(false)?; AlterTableOperation::SwapWith { table_name } } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) @@ -8827,8 +7178,8 @@ impl<'a> Parser<'a> { { let partition = self.parse_part_or_partition()?; let with_name = if self.parse_keyword(Keyword::WITH) { - self.expect_keyword_is(Keyword::NAME)?; - Some(self.parse_identifier()?) + self.expect_keyword(Keyword::NAME)?; + Some(self.parse_identifier(false)?) 
} else { None }; @@ -8841,8 +7192,8 @@ impl<'a> Parser<'a> { { let partition = self.parse_part_or_partition()?; let with_name = if self.parse_keyword(Keyword::WITH) { - self.expect_keyword_is(Keyword::NAME)?; - Some(self.parse_identifier()?) + self.expect_keyword(Keyword::NAME)?; + Some(self.parse_identifier(false)?) } else { None }; @@ -8850,75 +7201,6 @@ impl<'a> Parser<'a> { partition, with_name, } - } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { - self.expect_token(&Token::LParen)?; - let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?; - self.expect_token(&Token::RParen)?; - AlterTableOperation::ClusterBy { exprs } - } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) { - AlterTableOperation::SuspendRecluster - } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) { - AlterTableOperation::ResumeRecluster - } else if self.parse_keyword(Keyword::LOCK) { - let equals = self.consume_token(&Token::Eq); - let lock = match self.parse_one_of_keywords(&[ - Keyword::DEFAULT, - Keyword::EXCLUSIVE, - Keyword::NONE, - Keyword::SHARED, - ]) { - Some(Keyword::DEFAULT) => AlterTableLock::Default, - Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive, - Some(Keyword::NONE) => AlterTableLock::None, - Some(Keyword::SHARED) => AlterTableLock::Shared, - _ => self.expected( - "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]", - self.peek_token(), - )?, - }; - AlterTableOperation::Lock { equals, lock } - } else if self.parse_keyword(Keyword::ALGORITHM) { - let equals = self.consume_token(&Token::Eq); - let algorithm = match self.parse_one_of_keywords(&[ - Keyword::DEFAULT, - Keyword::INSTANT, - Keyword::INPLACE, - Keyword::COPY, - ]) { - Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default, - Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant, - Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace, - Some(Keyword::COPY) => AlterTableAlgorithm::Copy, - _ => self.expected( - "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]", - self.peek_token(), - )?, - }; - AlterTableOperation::Algorithm { equals, algorithm } - } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { - let equals = self.consume_token(&Token::Eq); - let value = self.parse_number_value()?; - AlterTableOperation::AutoIncrement { equals, value } - } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) { - let identity = if self.parse_keyword(Keyword::NONE) { - ReplicaIdentity::None - } else if self.parse_keyword(Keyword::FULL) { - ReplicaIdentity::Full - } else if self.parse_keyword(Keyword::DEFAULT) { - ReplicaIdentity::Default - } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) { - ReplicaIdentity::Index(self.parse_identifier()?) 
- } else { - return self.expected( - "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY", - self.peek_token(), - ); - }; - - AlterTableOperation::ReplicaIdentity { identity } - } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) { - let name = self.parse_identifier()?; - AlterTableOperation::ValidateConstraint { name } } else { let options: Vec = self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?; @@ -8928,7 +7210,7 @@ impl<'a> Parser<'a> { } } else { return self.expected( - "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, or SET TBLPROPERTIES after ALTER TABLE", + "ADD, RENAME, PARTITION, SWAP, DROP, or SET TBLPROPERTIES after ALTER TABLE", self.peek_token(), ); } @@ -8936,22 +7218,6 @@ impl<'a> Parser<'a> { Ok(operation) } - fn parse_set_data_type(&mut self, had_set: bool) -> Result { - let data_type = self.parse_data_type()?; - let using = if self.dialect.supports_alter_column_type_using() - && self.parse_keyword(Keyword::USING) - { - Some(self.parse_expr()?) - } else { - None - }; - Ok(AlterColumnOperation::SetDataType { - data_type, - using, - had_set, - }) - } - fn parse_part_or_partition(&mut self) -> Result { let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?; match keyword { @@ -8965,21 +7231,41 @@ impl<'a> Parser<'a> { pub fn parse_alter(&mut self) -> Result { let object_type = self.expect_one_of_keywords(&[ Keyword::VIEW, - Keyword::TYPE, Keyword::TABLE, Keyword::INDEX, Keyword::ROLE, Keyword::POLICY, - Keyword::CONNECTOR, - Keyword::ICEBERG, ])?; match object_type { Keyword::VIEW => self.parse_alter_view(), - Keyword::TYPE => self.parse_alter_type(), - Keyword::TABLE => self.parse_alter_table(false), - Keyword::ICEBERG => { - self.expect_keyword(Keyword::TABLE)?; - self.parse_alter_table(true) + Keyword::TABLE => { + let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ] + let table_name = self.parse_object_name(false)?; + let on_cluster = self.parse_optional_on_cluster()?; + let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?; + + let mut location = None; + if self.parse_keyword(Keyword::LOCATION) { + location = Some(HiveSetLocation { + has_set: false, + location: self.parse_identifier(false)?, + }); + } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) { + location = Some(HiveSetLocation { + has_set: true, + location: self.parse_identifier(false)?, + }); + } + + Ok(Statement::AlterTable { + name: table_name, + if_exists, + only, + operations, + location, + on_cluster, + }) } Keyword::INDEX => { let index_name = self.parse_object_name(false)?; @@ -9001,51 +7287,18 @@ impl<'a> Parser<'a> { } Keyword::ROLE => self.parse_alter_role(), Keyword::POLICY => self.parse_alter_policy(), - Keyword::CONNECTOR => self.parse_alter_connector(), // unreachable because expect_one_of_keywords used above _ => unreachable!(), } } - /// Parse a [Statement::AlterTable] - pub fn parse_alter_table(&mut self, iceberg: bool) -> Result { - let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ] - let table_name = self.parse_object_name(false)?; - let on_cluster = self.parse_optional_on_cluster()?; - let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?; - - let mut location = None; - if self.parse_keyword(Keyword::LOCATION) { - location = Some(HiveSetLocation { - has_set: false, - 
location: self.parse_identifier()?, - }); - } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) { - location = Some(HiveSetLocation { - has_set: true, - location: self.parse_identifier()?, - }); - } - - Ok(Statement::AlterTable { - name: table_name, - if_exists, - only, - operations, - location, - on_cluster, - iceberg, - }) - } - pub fn parse_alter_view(&mut self) -> Result { let name = self.parse_object_name(false)?; let columns = self.parse_parenthesized_column_list(Optional, false)?; let with_options = self.parse_options(Keyword::WITH)?; - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let query = self.parse_query()?; Ok(Statement::AlterView { @@ -9056,55 +7309,6 @@ impl<'a> Parser<'a> { }) } - /// Parse a [Statement::AlterType] - pub fn parse_alter_type(&mut self) -> Result { - let name = self.parse_object_name(false)?; - - if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { - let new_name = self.parse_identifier()?; - Ok(Statement::AlterType(AlterType { - name, - operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }), - })) - } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) { - let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let new_enum_value = self.parse_identifier()?; - let position = if self.parse_keyword(Keyword::BEFORE) { - Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?)) - } else if self.parse_keyword(Keyword::AFTER) { - Some(AlterTypeAddValuePosition::After(self.parse_identifier()?)) - } else { - None - }; - - Ok(Statement::AlterType(AlterType { - name, - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists, - value: new_enum_value, - position, - }), - })) - } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) { - let existing_enum_value = self.parse_identifier()?; - self.expect_keyword(Keyword::TO)?; - let new_enum_value = self.parse_identifier()?; - - Ok(Statement::AlterType(AlterType { - name, - operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue { - from: existing_enum_value, - to: new_enum_value, - }), - })) - } else { - return self.expected_ref( - "{RENAME TO | { RENAME | ADD } VALUE}", - self.peek_token_ref(), - ); - } - } - /// Parse a `CALL procedure_name(arg1, arg2, ...)` /// or `CALL procedure_name` statement pub fn parse_call(&mut self) -> Result { @@ -9114,13 +7318,12 @@ impl<'a> Parser<'a> { Expr::Function(f) => Ok(Statement::Call(f)), other => parser_err!( format!("Expected a simple procedure call but found: {other}"), - self.peek_token().span.start + self.peek_token().location ), } } else { Ok(Statement::Call(Function { name: object_name, - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, over: None, @@ -9198,19 +7401,11 @@ impl<'a> Parser<'a> { }) } - /// Parse [Statement::Open] - fn parse_open(&mut self) -> Result { - self.expect_keyword(Keyword::OPEN)?; - Ok(Statement::Open(OpenStatement { - cursor_name: self.parse_identifier()?, - })) - } - pub fn parse_close(&mut self) -> Result { let cursor = if self.parse_keyword(Keyword::ALL) { CloseCursor::All } else { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; CloseCursor::Specific { name } }; @@ -9232,7 +7427,7 @@ impl<'a> Parser<'a> { Keyword::FORCE_NULL, Keyword::ENCODING, ]) { - Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?), + Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier(false)?), Some(Keyword::FREEZE) => 
CopyOption::Freeze(!matches!( self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]), Some(Keyword::FALSE) @@ -9308,12 +7503,12 @@ impl<'a> Parser<'a> { } Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => { CopyLegacyCsvOption::ForceNotNull( - self.parse_comma_separated(|p| p.parse_identifier())?, + self.parse_comma_separated(|p| p.parse_identifier(false))?, ) } Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => { CopyLegacyCsvOption::ForceQuote( - self.parse_comma_separated(|p| p.parse_identifier())?, + self.parse_comma_separated(|p| p.parse_identifier(false))?, ) } _ => self.expected("csv option", self.peek_token())?, @@ -9327,7 +7522,7 @@ impl<'a> Parser<'a> { let loc = self .tokens .get(self.index - 1) - .map_or(Location { line: 0, column: 0 }, |t| t.span.start); + .map_or(Location { line: 0, column: 0 }, |t| t.location); return parser_err!(format!("Expect a char, found {s:?}"), loc); } Ok(s.chars().next().unwrap()) @@ -9371,114 +7566,102 @@ impl<'a> Parser<'a> { } /// Parse a literal value (numbers, strings, date/time, booleans) - pub fn parse_value(&mut self) -> Result { + pub fn parse_value(&mut self) -> Result { let next_token = self.next_token(); - let span = next_token.span; - let ok_value = |value: Value| Ok(value.with_span(span)); + let location = next_token.location; match next_token.token { Token::Word(w) => match w.keyword { - Keyword::TRUE if self.dialect.supports_boolean_literals() => { - ok_value(Value::Boolean(true)) - } - Keyword::FALSE if self.dialect.supports_boolean_literals() => { - ok_value(Value::Boolean(false)) - } - Keyword::NULL => ok_value(Value::Null), + Keyword::TRUE => Ok(Value::Boolean(true)), + Keyword::FALSE => Ok(Value::Boolean(false)), + Keyword::NULL => Ok(Value::Null), Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style { - Some('"') => ok_value(Value::DoubleQuotedString(w.value)), - Some('\'') => ok_value(Value::SingleQuotedString(w.value)), + Some('"') => Ok(Value::DoubleQuotedString(w.value)), + Some('\'') => Ok(Value::SingleQuotedString(w.value)), _ => self.expected( "A value?", - TokenWithSpan { + TokenWithLocation { token: Token::Word(w), - span, + location, }, )?, }, _ => self.expected( "a concrete value", - TokenWithSpan { + TokenWithLocation { token: Token::Word(w), - span, + location, }, ), }, // The call to n.parse() returns a bigdecimal when the // bigdecimal feature is enabled, and is otherwise a no-op // (i.e., it returns the input string). 
- Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)), - Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())), - Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())), + Token::Number(n, l) => Ok(Value::Number(Self::parse(n, location)?, l)), + Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), + Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), Token::TripleSingleQuotedString(ref s) => { - ok_value(Value::TripleSingleQuotedString(s.to_string())) + Ok(Value::TripleSingleQuotedString(s.to_string())) } Token::TripleDoubleQuotedString(ref s) => { - ok_value(Value::TripleDoubleQuotedString(s.to_string())) + Ok(Value::TripleDoubleQuotedString(s.to_string())) } - Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())), + Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())), Token::SingleQuotedByteStringLiteral(ref s) => { - ok_value(Value::SingleQuotedByteStringLiteral(s.clone())) + Ok(Value::SingleQuotedByteStringLiteral(s.clone())) } Token::DoubleQuotedByteStringLiteral(ref s) => { - ok_value(Value::DoubleQuotedByteStringLiteral(s.clone())) + Ok(Value::DoubleQuotedByteStringLiteral(s.clone())) } Token::TripleSingleQuotedByteStringLiteral(ref s) => { - ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone())) + Ok(Value::TripleSingleQuotedByteStringLiteral(s.clone())) } Token::TripleDoubleQuotedByteStringLiteral(ref s) => { - ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone())) + Ok(Value::TripleDoubleQuotedByteStringLiteral(s.clone())) } Token::SingleQuotedRawStringLiteral(ref s) => { - ok_value(Value::SingleQuotedRawStringLiteral(s.clone())) + Ok(Value::SingleQuotedRawStringLiteral(s.clone())) } Token::DoubleQuotedRawStringLiteral(ref s) => { - ok_value(Value::DoubleQuotedRawStringLiteral(s.clone())) + Ok(Value::DoubleQuotedRawStringLiteral(s.clone())) } Token::TripleSingleQuotedRawStringLiteral(ref s) => { - ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone())) + Ok(Value::TripleSingleQuotedRawStringLiteral(s.clone())) } Token::TripleDoubleQuotedRawStringLiteral(ref s) => { - ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone())) + Ok(Value::TripleDoubleQuotedRawStringLiteral(s.clone())) } - Token::NationalStringLiteral(ref s) => { - ok_value(Value::NationalStringLiteral(s.to_string())) - } - Token::EscapedStringLiteral(ref s) => { - ok_value(Value::EscapedStringLiteral(s.to_string())) - } - Token::UnicodeStringLiteral(ref s) => { - ok_value(Value::UnicodeStringLiteral(s.to_string())) - } - Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())), - Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())), + Token::NationalStringLiteral(ref s) => Ok(Value::NationalStringLiteral(s.to_string())), + Token::EscapedStringLiteral(ref s) => Ok(Value::EscapedStringLiteral(s.to_string())), + Token::UnicodeStringLiteral(ref s) => Ok(Value::UnicodeStringLiteral(s.to_string())), + Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())), + Token::Placeholder(ref s) => Ok(Value::Placeholder(s.to_string())), tok @ Token::Colon | tok @ Token::AtSign => { // Not calling self.parse_identifier(false)? 
because only in placeholder we want to check numbers as idfentifies // This because snowflake allows numbers as placeholders let next_token = self.next_token(); let ident = match next_token.token { - Token::Word(w) => Ok(w.into_ident(next_token.span)), + Token::Word(w) => Ok(w.to_ident()), Token::Number(w, false) => Ok(Ident::new(w)), _ => self.expected("placeholder", next_token), }?; let placeholder = tok.to_string() + &ident.value; - ok_value(Value::Placeholder(placeholder)) + Ok(Value::Placeholder(placeholder)) } unexpected => self.expected( "a value", - TokenWithSpan { + TokenWithLocation { token: unexpected, - span, + location, }, ), } } /// Parse an unsigned numeric literal - pub fn parse_number_value(&mut self) -> Result { - let value_wrapper = self.parse_value()?; - match &value_wrapper.value { - Value::Number(_, _) => Ok(value_wrapper), - Value::Placeholder(_) => Ok(value_wrapper), + pub fn parse_number_value(&mut self) -> Result { + match self.parse_value()? { + v @ Value::Number(_, _) => Ok(v), + v @ Value::Placeholder(_) => Ok(v), _ => { self.prev_token(); self.expected("literal number", self.peek_token()) @@ -9506,24 +7689,18 @@ impl<'a> Parser<'a> { } } - fn parse_introduced_string_expr(&mut self) -> Result { + fn parse_introduced_string_value(&mut self) -> Result { let next_token = self.next_token(); - let span = next_token.span; + let location = next_token.location; match next_token.token { - Token::SingleQuotedString(ref s) => Ok(Expr::Value( - Value::SingleQuotedString(s.to_string()).with_span(span), - )), - Token::DoubleQuotedString(ref s) => Ok(Expr::Value( - Value::DoubleQuotedString(s.to_string()).with_span(span), - )), - Token::HexStringLiteral(ref s) => Ok(Expr::Value( - Value::HexStringLiteral(s.to_string()).with_span(span), - )), + Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), + Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), + Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())), unexpected => self.expected( "a string value", - TokenWithSpan { + TokenWithLocation { token: unexpected, - span, + location, }, ), } @@ -9533,7 +7710,7 @@ impl<'a> Parser<'a> { pub fn parse_literal_uint(&mut self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.span.start), + Token::Number(s, _) => Self::parse::(s, next_token.location), _ => self.expected("literal int", next_token), } } @@ -9542,16 +7719,15 @@ impl<'a> Parser<'a> { /// e.g. `CREATE FUNCTION ... AS $$ body $$`. 
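
The hunks above switch `parse_value`/`parse_number_value` back to returning a bare `Value` (the `+` side) instead of the newer `ValueWithSpan` wrapper (the `-` side), so callers no longer receive a source span with each literal. Below is a minimal, untested sketch of a call site against the 0.52-style API; it assumes the crate's `Parser::new(...).try_with_sql(...)` entry point, which is not shown in this diff.

    use sqlparser::ast::Value;
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    fn main() -> Result<(), sqlparser::parser::ParserError> {
        let dialect = GenericDialect {};
        let mut parser = Parser::new(&dialect).try_with_sql("42")?;
        // With the `+` side of this diff, the parser hands back the literal value
        // directly; there is no `.value` / `.span` indirection to unwrap.
        match parser.parse_number_value()? {
            Value::Number(n, _) => println!("parsed number literal: {n}"),
            other => println!("unexpected value: {other:?}"),
        }
        Ok(())
    }
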
fn parse_create_function_body_string(&mut self) -> Result { let peek_token = self.peek_token(); - let span = peek_token.span; match peek_token.token { Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { self.next_token(); - Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span))) + Ok(Expr::Value(Value::DollarQuotedString(s))) } - _ => Ok(Expr::Value( - Value::SingleQuotedString(self.parse_literal_string()?).with_span(span), - )), + _ => Ok(Expr::Value(Value::SingleQuotedString( + self.parse_literal_string()?, + ))), } } @@ -9574,50 +7750,6 @@ impl<'a> Parser<'a> { } } - /// Parse a literal unicode normalization clause - pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result { - let neg = self.parse_keyword(Keyword::NOT); - let normalized_form = self.maybe_parse(|parser| { - match parser.parse_one_of_keywords(&[ - Keyword::NFC, - Keyword::NFD, - Keyword::NFKC, - Keyword::NFKD, - ]) { - Some(Keyword::NFC) => Ok(NormalizationForm::NFC), - Some(Keyword::NFD) => Ok(NormalizationForm::NFD), - Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC), - Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD), - _ => parser.expected("unicode normalization form", parser.peek_token()), - } - })?; - if self.parse_keyword(Keyword::NORMALIZED) { - return Ok(Expr::IsNormalized { - expr: Box::new(expr), - form: normalized_form, - negated: neg, - }); - } - self.expected("unicode normalization form", self.peek_token()) - } - - pub fn parse_enum_values(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; - let values = self.parse_comma_separated(|parser| { - let name = parser.parse_literal_string()?; - let e = if parser.consume_token(&Token::Eq) { - let value = parser.parse_number()?; - EnumMember::NamedValue(name, value) - } else { - EnumMember::Name(name) - }; - Ok(e) - })?; - self.expect_token(&Token::RParen)?; - - Ok(values) - } - /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example) pub fn parse_data_type(&mut self) -> Result { let (ty, trailing_bracket) = self.parse_data_type_helper()?; @@ -9634,13 +7766,9 @@ impl<'a> Parser<'a> { fn parse_data_type_helper( &mut self, ) -> Result<(DataType, MatchedTrailingBracket), ParserError> { - let dialect = self.dialect; - self.advance_token(); - let next_token = self.get_current_token(); - let next_token_index = self.get_current_index(); - + let next_token = self.next_token(); let mut trailing_bracket: MatchedTrailingBracket = false.into(); - let mut data = match &next_token.token { + let mut data = match next_token.token { Token::Word(w) => match w.keyword { Keyword::BOOLEAN => Ok(DataType::Boolean), Keyword::BOOL => Ok(DataType::Bool), @@ -9654,15 +7782,13 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::PRECISION) { Ok(DataType::DoublePrecision) } else { - Ok(DataType::Double( - self.parse_exact_number_optional_precision_scale()?, - )) + Ok(DataType::Double) } } Keyword::TINYINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::TinyIntUnsigned(optional_precision?)) + Ok(DataType::UnsignedTinyInt(optional_precision?)) } else { Ok(DataType::TinyInt(optional_precision?)) } @@ -9670,7 +7796,7 @@ impl<'a> Parser<'a> { Keyword::INT2 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::Int2Unsigned(optional_precision?)) + Ok(DataType::UnsignedInt2(optional_precision?)) } else { Ok(DataType::Int2(optional_precision?)) } @@ 
-9678,7 +7804,7 @@ impl<'a> Parser<'a> { Keyword::SMALLINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::SmallIntUnsigned(optional_precision?)) + Ok(DataType::UnsignedSmallInt(optional_precision?)) } else { Ok(DataType::SmallInt(optional_precision?)) } @@ -9686,7 +7812,7 @@ impl<'a> Parser<'a> { Keyword::MEDIUMINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::MediumIntUnsigned(optional_precision?)) + Ok(DataType::UnsignedMediumInt(optional_precision?)) } else { Ok(DataType::MediumInt(optional_precision?)) } @@ -9694,7 +7820,7 @@ impl<'a> Parser<'a> { Keyword::INT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::IntUnsigned(optional_precision?)) + Ok(DataType::UnsignedInt(optional_precision?)) } else { Ok(DataType::Int(optional_precision?)) } @@ -9702,7 +7828,7 @@ impl<'a> Parser<'a> { Keyword::INT4 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::Int4Unsigned(optional_precision?)) + Ok(DataType::UnsignedInt4(optional_precision?)) } else { Ok(DataType::Int4(optional_precision?)) } @@ -9710,7 +7836,7 @@ impl<'a> Parser<'a> { Keyword::INT8 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::Int8Unsigned(optional_precision?)) + Ok(DataType::UnsignedInt8(optional_precision?)) } else { Ok(DataType::Int8(optional_precision?)) } @@ -9723,7 +7849,7 @@ impl<'a> Parser<'a> { Keyword::INTEGER => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::IntegerUnsigned(optional_precision?)) + Ok(DataType::UnsignedInteger(optional_precision?)) } else { Ok(DataType::Integer(optional_precision?)) } @@ -9731,16 +7857,11 @@ impl<'a> Parser<'a> { Keyword::BIGINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::BigIntUnsigned(optional_precision?)) + Ok(DataType::UnsignedBigInt(optional_precision?)) } else { Ok(DataType::BigInt(optional_precision?)) } } - Keyword::HUGEINT => Ok(DataType::HugeInt), - Keyword::UBIGINT => Ok(DataType::UBigInt), - Keyword::UHUGEINT => Ok(DataType::UHugeInt), - Keyword::USMALLINT => Ok(DataType::USmallInt), - Keyword::UTINYINT => Ok(DataType::UTinyInt), Keyword::UINT8 => Ok(DataType::UInt8), Keyword::UINT16 => Ok(DataType::UInt16), Keyword::UINT32 => Ok(DataType::UInt32), @@ -9777,20 +7898,9 @@ impl<'a> Parser<'a> { } Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)), Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)), - Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)), + Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_precision()?)), Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)), - Keyword::TINYBLOB => Ok(DataType::TinyBlob), - Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob), - Keyword::LONGBLOB => Ok(DataType::LongBlob), Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)), - Keyword::BIT => { - if self.parse_keyword(Keyword::VARYING) { - Ok(DataType::BitVarying(self.parse_optional_precision()?)) - } else { - Ok(DataType::Bit(self.parse_optional_precision()?)) - } - } - Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)), Keyword::UUID 
=> Ok(DataType::Uuid), Keyword::DATE => Ok(DataType::Date), Keyword::DATE32 => Ok(DataType::Date32), @@ -9817,7 +7927,6 @@ impl<'a> Parser<'a> { self.parse_optional_precision()?, TimezoneInfo::Tz, )), - Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz), Keyword::TIME => { let precision = self.parse_optional_precision()?; let tz = if self.parse_keyword(Keyword::WITH) { @@ -9850,9 +7959,6 @@ impl<'a> Parser<'a> { Ok(DataType::FixedString(character_length)) } Keyword::TEXT => Ok(DataType::Text), - Keyword::TINYTEXT => Ok(DataType::TinyText), - Keyword::MEDIUMTEXT => Ok(DataType::MediumText), - Keyword::LONGTEXT => Ok(DataType::LongText), Keyword::BYTEA => Ok(DataType::Bytea), Keyword::NUMERIC => Ok(DataType::Numeric( self.parse_exact_number_optional_precision_scale()?, @@ -9869,9 +7975,7 @@ impl<'a> Parser<'a> { Keyword::BIGDECIMAL => Ok(DataType::BigDecimal( self.parse_exact_number_optional_precision_scale()?, )), - Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)), - Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))), - Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))), + Keyword::ENUM => Ok(DataType::Enum(self.parse_string_values()?)), Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)), Keyword::ARRAY => { if dialect_of!(self is SnowflakeDialect) { @@ -9889,12 +7993,12 @@ impl<'a> Parser<'a> { )))) } } - Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => { + Keyword::STRUCT if dialect_of!(self is DuckDbDialect) => { self.prev_token(); let field_defs = self.parse_duckdb_struct_type_def()?; Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses)) } - Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => { + Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => { self.prev_token(); let (field_defs, _trailing_bracket) = self.parse_struct_type_def(Self::parse_struct_field_def)?; @@ -9904,18 +8008,18 @@ impl<'a> Parser<'a> { StructBracketKind::AngleBrackets, )) } - Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { + Keyword::UNION if dialect_of!(self is DuckDbDialect | GenericDialect) => { self.prev_token(); let fields = self.parse_union_type_def()?; Ok(DataType::Union(fields)) } - Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { + Keyword::NULLABLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { Ok(self.parse_sub_type(DataType::Nullable)?) } - Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { + Keyword::LOWCARDINALITY if dialect_of!(self is ClickHouseDialect | GenericDialect) => { Ok(self.parse_sub_type(DataType::LowCardinality)?) 
} - Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { + Keyword::MAP if dialect_of!(self is ClickHouseDialect | GenericDialect) => { self.prev_token(); let (key_data_type, value_data_type) = self.parse_click_house_map_def()?; Ok(DataType::Map( @@ -9923,52 +8027,18 @@ impl<'a> Parser<'a> { Box::new(value_data_type), )) } - Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { + Keyword::NESTED if dialect_of!(self is ClickHouseDialect | GenericDialect) => { self.expect_token(&Token::LParen)?; let field_defs = self.parse_comma_separated(Parser::parse_column_def)?; self.expect_token(&Token::RParen)?; Ok(DataType::Nested(field_defs)) } - Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { + Keyword::TUPLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { self.prev_token(); let field_defs = self.parse_click_house_tuple_def()?; Ok(DataType::Tuple(field_defs)) } Keyword::TRIGGER => Ok(DataType::Trigger), - Keyword::ANY if self.peek_keyword(Keyword::TYPE) => { - let _ = self.parse_keyword(Keyword::TYPE); - Ok(DataType::AnyType) - } - Keyword::TABLE => { - // an LParen after the TABLE keyword indicates that table columns are being defined - // whereas no LParen indicates an anonymous table expression will be returned - if self.peek_token() == Token::LParen { - let columns = self.parse_returns_table_columns()?; - Ok(DataType::Table(Some(columns))) - } else { - Ok(DataType::Table(None)) - } - } - Keyword::SIGNED => { - if self.parse_keyword(Keyword::INTEGER) { - Ok(DataType::SignedInteger) - } else { - Ok(DataType::Signed) - } - } - Keyword::UNSIGNED => { - if self.parse_keyword(Keyword::INTEGER) { - Ok(DataType::UnsignedInteger) - } else { - Ok(DataType::Unsigned) - } - } - Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { - Ok(DataType::TsVector) - } - Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { - Ok(DataType::TsQuery) - } _ => { self.prev_token(); let type_name = self.parse_object_name(false)?; @@ -9979,31 +8049,23 @@ impl<'a> Parser<'a> { } } }, - _ => self.expected_at("a data type name", next_token_index), + _ => self.expected("a data type name", next_token), }?; - if self.dialect.supports_array_typedef_with_brackets() { - while self.consume_token(&Token::LBracket) { - // Parse optional array data type size - let size = self.maybe_parse(|p| p.parse_literal_uint())?; - self.expect_token(&Token::RBracket)?; - data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size)) - } + // Parse array data types. Note: this is postgresql-specific and different from + // Keyword::ARRAY syntax from above + while self.consume_token(&Token::LBracket) { + let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) { + self.maybe_parse(|p| p.parse_literal_uint())? 
+ } else { + None + }; + self.expect_token(&Token::RBracket)?; + data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size)) } Ok((data, trailing_bracket)) } - fn parse_returns_table_column(&mut self) -> Result { - self.parse_column_def() - } - - fn parse_returns_table_columns(&mut self) -> Result, ParserError> { - self.expect_token(&Token::LParen)?; - let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?; - self.expect_token(&Token::RParen)?; - Ok(columns) - } - pub fn parse_string_values(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let mut values = Vec::new(); @@ -10025,182 +8087,44 @@ impl<'a> Parser<'a> { /// Strictly parse `identifier AS identifier` pub fn parse_identifier_with_alias(&mut self) -> Result { - let ident = self.parse_identifier()?; - self.expect_keyword_is(Keyword::AS)?; - let alias = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; + self.expect_keyword(Keyword::AS)?; + let alias = self.parse_identifier(false)?; Ok(IdentWithAlias { ident, alias }) } - /// Parse `identifier [AS] identifier` where the AS keyword is optional - fn parse_identifier_with_optional_alias(&mut self) -> Result { - let ident = self.parse_identifier()?; - let _after_as = self.parse_keyword(Keyword::AS); - let alias = self.parse_identifier()?; - Ok(IdentWithAlias { ident, alias }) - } - - /// Parse comma-separated list of parenthesized queries for pipe operators - fn parse_pipe_operator_queries(&mut self) -> Result, ParserError> { - self.parse_comma_separated(|parser| { - parser.expect_token(&Token::LParen)?; - let query = parser.parse_query()?; - parser.expect_token(&Token::RParen)?; - Ok(*query) - }) - } - - /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT - fn parse_distinct_required_set_quantifier( - &mut self, - operator_name: &str, - ) -> Result { - let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect)); - match quantifier { - SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier), - _ => Err(ParserError::ParserError(format!( - "{operator_name} pipe operator requires DISTINCT modifier", - ))), - } - } - - /// Parse optional identifier alias (with or without AS keyword) - fn parse_identifier_optional_alias(&mut self) -> Result, ParserError> { - if self.parse_keyword(Keyword::AS) { - Ok(Some(self.parse_identifier()?)) - } else { - // Check if the next token is an identifier (implicit alias) - self.maybe_parse(|parser| parser.parse_identifier()) - } - } - - /// Optionally parses an alias for a select list item - fn maybe_parse_select_item_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { - parser.dialect.is_select_item_alias(explicit, kw, parser) - } - self.parse_optional_alias_inner(None, validator) - } - - /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`. - /// In this case, the alias is allowed to optionally name the columns in the table, in - /// addition to the table itself. - pub fn maybe_parse_table_alias(&mut self) -> Result, ParserError> { - fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { - parser.dialect.is_table_factor_alias(explicit, kw, parser) - } - match self.parse_optional_alias_inner(None, validator)? 
{ - Some(name) => { - let columns = self.parse_table_alias_column_defs()?; - Ok(Some(TableAlias { name, columns })) - } - None => Ok(None), - } - } - - fn parse_table_index_hints(&mut self) -> Result, ParserError> { - let mut hints = vec![]; - while let Some(hint_type) = - self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE]) - { - let hint_type = match hint_type { - Keyword::USE => TableIndexHintType::Use, - Keyword::IGNORE => TableIndexHintType::Ignore, - Keyword::FORCE => TableIndexHintType::Force, - _ => { - return self.expected( - "expected to match USE/IGNORE/FORCE keyword", - self.peek_token(), - ) - } - }; - let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) { - Some(Keyword::INDEX) => TableIndexType::Index, - Some(Keyword::KEY) => TableIndexType::Key, - _ => { - return self.expected("expected to match INDEX/KEY keyword", self.peek_token()) - } - }; - let for_clause = if self.parse_keyword(Keyword::FOR) { - let clause = if self.parse_keyword(Keyword::JOIN) { - TableIndexHintForClause::Join - } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { - TableIndexHintForClause::OrderBy - } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { - TableIndexHintForClause::GroupBy - } else { - return self.expected( - "expected to match FOR/ORDER BY/GROUP BY table hint in for clause", - self.peek_token(), - ); - }; - Some(clause) - } else { - None - }; - - self.expect_token(&Token::LParen)?; - let index_names = if self.peek_token().token != Token::RParen { - self.parse_comma_separated(Parser::parse_identifier)? - } else { - vec![] - }; - self.expect_token(&Token::RParen)?; - hints.push(TableIndexHints { - hint_type, - index_type, - for_clause, - index_names, - }); - } - Ok(hints) - } - - /// Wrapper for parse_optional_alias_inner, left for backwards-compatibility - /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias` - /// and `maybe_parse_table_alias`. + /// Parse `AS identifier` (or simply `identifier` if it's not a reserved keyword) + /// Some examples with aliases: `SELECT 1 foo`, `SELECT COUNT(*) AS cnt`, + /// `SELECT ... FROM t1 foo, t2 bar`, `SELECT ... FROM (...) AS bar` pub fn parse_optional_alias( &mut self, reserved_kwds: &[Keyword], ) -> Result, ParserError> { - fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool { - false - } - self.parse_optional_alias_inner(Some(reserved_kwds), validator) - } - - /// Parses an optional alias after a SQL element such as a select list item - /// or a table name. - /// - /// This method accepts an optional list of reserved keywords or a function - /// to call to validate if a keyword should be parsed as an alias, to allow - /// callers to customize the parsing logic based on their context. - fn parse_optional_alias_inner( - &mut self, - reserved_kwds: Option<&[Keyword]>, - validator: F, - ) -> Result, ParserError> - where - F: Fn(bool, &Keyword, &mut Parser) -> bool, - { let after_as = self.parse_keyword(Keyword::AS); - let next_token = self.next_token(); match next_token.token { - // By default, if a word is located after the `AS` keyword we consider it an alias - // as long as it's not reserved. - Token::Word(w) - if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) => - { - Ok(Some(w.into_ident(next_token.span))) + // Accept any identifier after `AS` (though many dialects have restrictions on + // keywords that may appear here). 
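
This hunk restores the keyword-list form of `parse_optional_alias` (the `+` side) in place of the newer dialect-driven `parse_optional_alias_inner` validator (the `-` side). A hedged sketch of how a caller would pass the reserved-keyword list explicitly under the 0.52-style signature; the `Parser::new(...).try_with_sql(...)` constructor is assumed, not shown in this diff.

    use sqlparser::dialect::GenericDialect;
    use sqlparser::keywords::Keyword;
    use sqlparser::parser::Parser;

    fn main() -> Result<(), sqlparser::parser::ParserError> {
        let dialect = GenericDialect {};
        let mut parser = Parser::new(&dialect).try_with_sql("AS total")?;
        // The caller, not the dialect, decides which keywords may not serve as an
        // implicit alias; after an explicit `AS` the following word is accepted.
        let alias = parser.parse_optional_alias(&[Keyword::FROM, Keyword::WHERE])?;
        println!("alias: {alias:?}");
        Ok(())
    }
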
If there's no `AS`: don't parse keywords, + // which may start a construct allowed in this position, to be parsed as aliases. + // (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword, + // not an alias.) + Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => { + Ok(Some(w.to_ident())) } - // This pattern allows for customizing the acceptance of words as aliases based on the caller's - // context, such as to what SQL element this word is a potential alias of (select item alias, table name - // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords. - Token::Word(w) if validator(after_as, &w.keyword, self) => { - Ok(Some(w.into_ident(next_token.span))) - } - // For backwards-compatibility, we accept quoted strings as aliases regardless of the context. + // MSSQL supports single-quoted strings as aliases for columns + // We accept them as table aliases too, although MSSQL does not. + // + // Note, that this conflicts with an obscure rule from the SQL + // standard, which we don't implement: + // https://crate.io/docs/sql-99/en/latest/chapters/07.html#character-string-literal-s + // "[Obscure Rule] SQL allows you to break a long up into two or more smaller s, split by a that includes a newline + // character. When it sees such a , your DBMS will + // ignore the and treat the multiple strings as + // a single ." Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))), + // Support for MySql dialect double-quoted string, `AS "HOUR"` for example Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))), _ => { if after_as { @@ -10212,6 +8136,23 @@ impl<'a> Parser<'a> { } } + /// Parse `AS identifier` when the AS is describing a table-valued object, + /// like in `... FROM generate_series(1, 10) AS t (col)`. In this case + /// the alias is allowed to optionally name the columns in the table, in + /// addition to the table itself. + pub fn parse_optional_table_alias( + &mut self, + reserved_kwds: &[Keyword], + ) -> Result, ParserError> { + match self.parse_optional_alias(reserved_kwds)? 
{ + Some(name) => { + let columns = self.parse_parenthesized_column_list(Optional, false)?; + Ok(Some(TableAlias { name, columns })) + } + None => Ok(None), + } + } + pub fn parse_optional_group_by(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { let expressions = if self.parse_keyword(Keyword::ALL) { @@ -10221,7 +8162,7 @@ impl<'a> Parser<'a> { }; let mut modifiers = vec![]; - if self.dialect.supports_group_by_with_modifier() { + if dialect_of!(self is ClickHouseDialect | GenericDialect) { loop { if !self.parse_keyword(Keyword::WITH) { break; @@ -10238,26 +8179,12 @@ impl<'a> Parser<'a> { _ => { return parser_err!( "BUG: expected to match GroupBy modifier keyword", - self.peek_token().span.start + self.peek_token().location ) } }); } } - if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) { - self.expect_token(&Token::LParen)?; - let result = self.parse_comma_separated(|p| { - if p.peek_token_ref().token == Token::LParen { - p.parse_tuple(true, true) - } else { - Ok(vec![p.parse_expr()?]) - } - })?; - self.expect_token(&Token::RParen)?; - modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets( - result, - ))); - }; let group_by = match expressions { None => GroupByExpr::All(modifiers), Some(exprs) => GroupByExpr::Expressions(exprs, modifiers), @@ -10270,97 +8197,22 @@ impl<'a> Parser<'a> { pub fn parse_optional_order_by(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { - let order_by = - if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) { - let order_by_options = self.parse_order_by_options()?; - OrderBy { - kind: OrderByKind::All(order_by_options), - interpolate: None, - } - } else { - let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; - let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) { - self.parse_interpolations()? - } else { - None - }; - OrderBy { - kind: OrderByKind::Expressions(exprs), - interpolate, - } - }; - Ok(Some(order_by)) - } else { - Ok(None) - } - } - - fn parse_optional_limit_clause(&mut self) -> Result, ParserError> { - let mut offset = if self.parse_keyword(Keyword::OFFSET) { - Some(self.parse_offset()?) - } else { - None - }; - - let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) { - let expr = self.parse_limit()?; - - if self.dialect.supports_limit_comma() - && offset.is_none() - && expr.is_some() // ALL not supported with comma - && self.consume_token(&Token::Comma) - { - let offset = expr.ok_or_else(|| { - ParserError::ParserError( - "Missing offset for LIMIT , ".to_string(), - ) - })?; - return Ok(Some(LimitClause::OffsetCommaLimit { - offset, - limit: self.parse_expr()?, - })); - } - - let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect) - && self.parse_keyword(Keyword::BY) - { - Some(self.parse_comma_separated(Parser::parse_expr)?) + let order_by_exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; + let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) { + self.parse_interpolations()? 
} else { None }; - (Some(expr), limit_by) - } else { - (None, None) - }; - - if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) { - offset = Some(self.parse_offset()?); - } - - if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() { - Ok(Some(LimitClause::LimitOffset { - limit: limit.unwrap_or_default(), - offset, - limit_by: limit_by.unwrap_or_default(), + Ok(Some(OrderBy { + exprs: order_by_exprs, + interpolate, })) } else { Ok(None) } } - /// Parse a table object for insertion - /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)` - pub fn parse_table_object(&mut self) -> Result { - if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) { - let fn_name = self.parse_object_name(false)?; - self.parse_function_call(fn_name) - .map(TableObject::TableFunction) - } else { - self.parse_object_name(false).map(TableObject::TableName) - } - } - /// Parse a possibly qualified, possibly quoted identifier, e.g. /// `foo` or `myschema."table" /// @@ -10368,118 +8220,49 @@ impl<'a> Parser<'a> { /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers /// in this context on BigQuery. pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result { - self.parse_object_name_inner(in_table_clause, false) - } - - /// Parse a possibly qualified, possibly quoted identifier, e.g. - /// `foo` or `myschema."table" - /// - /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN, - /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers - /// in this context on BigQuery. - /// - /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name - /// e.g. 
*, *.*, `foo`.*, or "foo"."bar" - fn parse_object_name_inner( - &mut self, - in_table_clause: bool, - allow_wildcards: bool, - ) -> Result { - let mut parts = vec![]; - if dialect_of!(self is BigQueryDialect) && in_table_clause { - loop { - let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?; - parts.push(ObjectNamePart::Identifier(ident)); - if !self.consume_token(&Token::Period) && !end_with_period { - break; - } - } - } else { - loop { - if allow_wildcards && self.peek_token().token == Token::Mul { - let span = self.next_token().span; - parts.push(ObjectNamePart::Identifier(Ident { - value: Token::Mul.to_string(), - quote_style: None, - span, - })); - } else if dialect_of!(self is BigQueryDialect) && in_table_clause { - let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?; - parts.push(ObjectNamePart::Identifier(ident)); - if !self.consume_token(&Token::Period) && !end_with_period { - break; - } - } else if self.dialect.supports_object_name_double_dot_notation() - && parts.len() == 1 - && matches!(self.peek_token().token, Token::Period) - { - // Empty string here means default schema - parts.push(ObjectNamePart::Identifier(Ident::new(""))); - } else { - let ident = self.parse_identifier()?; - let part = if self - .dialect - .is_identifier_generating_function_name(&ident, &parts) - { - self.expect_token(&Token::LParen)?; - let args: Vec = - self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?; - self.expect_token(&Token::RParen)?; - ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args }) - } else { - ObjectNamePart::Identifier(ident) - }; - parts.push(part); - } - - if !self.consume_token(&Token::Period) { - break; - } + let mut idents = vec![]; + loop { + idents.push(self.parse_identifier(in_table_clause)?); + if !self.consume_token(&Token::Period) { + break; } } // BigQuery accepts any number of quoted identifiers of a table name. 
// https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers if dialect_of!(self is BigQueryDialect) - && parts.iter().any(|part| { - part.as_ident() - .is_some_and(|ident| ident.value.contains('.')) - }) + && idents.iter().any(|ident| ident.value.contains('.')) { - parts = parts + idents = idents .into_iter() - .flat_map(|part| match part.as_ident() { - Some(ident) => ident + .flat_map(|ident| { + ident .value .split('.') - .map(|value| { - ObjectNamePart::Identifier(Ident { - value: value.into(), - quote_style: ident.quote_style, - span: ident.span, - }) + .map(|value| Ident { + value: value.into(), + quote_style: ident.quote_style, }) - .collect::>(), - None => vec![part], + .collect::>() }) .collect() } - Ok(ObjectName(parts)) + Ok(ObjectName(idents)) } /// Parse identifiers pub fn parse_identifiers(&mut self) -> Result, ParserError> { let mut idents = vec![]; loop { - match &self.peek_token_ref().token { + match self.peek_token().token { Token::Word(w) => { - idents.push(w.clone().into_ident(self.peek_token_ref().span)); + idents.push(w.to_ident()); } Token::EOF | Token::Eq => break, _ => {} } - self.advance_token(); + self.next_token(); } Ok(idents) } @@ -10527,9 +8310,8 @@ impl<'a> Parser<'a> { let mut idents = vec![]; // expecting at least one word for identifier - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => idents.push(w.into_ident(next_token.span)), + match self.next_token().token { + Token::Word(w) => idents.push(w.to_ident()), Token::EOF => { return Err(ParserError::ParserError( "Empty input when parsing identifier".to_string(), @@ -10546,22 +8328,19 @@ impl<'a> Parser<'a> { loop { match self.next_token().token { // ensure that optional period is succeeded by another identifier - Token::Period => { - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => idents.push(w.into_ident(next_token.span)), - Token::EOF => { - return Err(ParserError::ParserError( - "Trailing period in identifier".to_string(), - ))? - } - token => { - return Err(ParserError::ParserError(format!( - "Unexpected token following period in identifier: {token}" - )))? - } + Token::Period => match self.next_token().token { + Token::Word(w) => idents.push(w.to_ident()), + Token::EOF => { + return Err(ParserError::ParserError( + "Trailing period in identifier".to_string(), + ))? } - } + token => { + return Err(ParserError::ParserError(format!( + "Unexpected token following period in identifier: {token}" + )))? + } + }, Token::EOF => break, token => { return Err(ParserError::ParserError(format!( @@ -10575,33 +8354,29 @@ impl<'a> Parser<'a> { } /// Parse a simple one-word identifier (possibly quoted, possibly a keyword) - pub fn parse_identifier(&mut self) -> Result { + /// + /// The `in_table_clause` parameter indicates whether the identifier is a table in a FROM, JOIN, or + /// similar table clause. Currently, this is used only to support unquoted hyphenated identifiers in + // this context on BigQuery. + pub fn parse_identifier(&mut self, in_table_clause: bool) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => Ok(w.into_ident(next_token.span)), - Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), - Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), - _ => self.expected("identifier", next_token), - } - } - - /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or - /// TABLE clause. 
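
On the `+` side of the `parse_object_name` hunk above, `ObjectName` is once again a plain tuple struct over `Vec<Ident>`, whereas the `-` side wraps every segment in `ObjectNamePart`. Downstream code that walks qualified names must match whichever shape it compiles against; a small sketch assuming the 0.52-style layout and the `Parser::new(...).try_with_sql(...)` entry point:

    use sqlparser::ast::ObjectName;
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    fn main() -> Result<(), sqlparser::parser::ParserError> {
        let dialect = GenericDialect {};
        let mut parser = Parser::new(&dialect).try_with_sql("myschema.mytable")?;
        // With Vec<Ident> inside, each part is an identifier that can be read
        // directly, without unwrapping an ObjectNamePart enum first.
        let ObjectName(parts) = parser.parse_object_name(false)?;
        for ident in &parts {
            println!("name segment: {ident}");
        }
        Ok(())
    }
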
- /// - /// The first segment must be an ordinary unquoted identifier, e.g. it must not start - /// with a digit. Subsequent segments are either must either be valid identifiers or - /// integers, e.g. foo-123 is allowed, but foo-123a is not. - /// - /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical) - /// - /// Return a tuple of the identifier and a boolean indicating it ends with a period. - fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> { - match self.peek_token().token { Token::Word(w) => { - let quote_style_is_none = w.quote_style.is_none(); - let mut requires_whitespace = false; - let mut ident = w.into_ident(self.next_token().span); - if quote_style_is_none { + let mut ident = w.to_ident(); + + // On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or + // TABLE clause [0]. + // + // The first segment must be an ordinary unquoted identifier, e.g. it must not start + // with a digit. Subsequent segments are either must either be valid identifiers or + // integers, e.g. foo-123 is allowed, but foo-123a is not. + // + // [0] https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical + if dialect_of!(self is BigQueryDialect) + && w.quote_style.is_none() + && in_table_clause + { + let mut requires_whitespace = false; while matches!(self.peek_token_no_skip().token, Token::Minus) { self.next_token(); ident.value.push('-'); @@ -10609,36 +8384,15 @@ impl<'a> Parser<'a> { let token = self .next_token_no_skip() .cloned() - .unwrap_or(TokenWithSpan::wrap(Token::EOF)); + .unwrap_or(TokenWithLocation::wrap(Token::EOF)); requires_whitespace = match token.token { Token::Word(next_word) if next_word.quote_style.is_none() => { ident.value.push_str(&next_word.value); false } - Token::Number(s, false) => { - // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`. - // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`. - // - // If a number token is followed by a period, it is part of an [ObjectName]. - // Return the identifier with `true` if the number token is followed by a period, indicating that - // parsing should continue for the next part of the hyphenated identifier. - if s.ends_with('.') { - let Some(s) = s.split('.').next().filter(|s| { - !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()) - }) else { - return self.expected( - "continuation of hyphenated identifier", - TokenWithSpan::new(Token::Number(s, false), token.span), - ); - }; - ident.value.push_str(s); - return Ok((ident, true)); - } else { - ident.value.push_str(&s); - } - // If next token is period, then it is part of an ObjectName and we don't expect whitespace - // after the number. 
- !matches!(self.peek_token().token, Token::Period) + Token::Number(s, false) if s.chars().all(|c| c.is_ascii_digit()) => { + ident.value.push_str(&s); + true } _ => { return self @@ -10657,9 +8411,11 @@ impl<'a> Parser<'a> { } } } - Ok((ident, false)) + Ok(ident) } - _ => Ok((self.parse_identifier()?, false)), + Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), + Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), + _ => self.expected("identifier", next_token), } } @@ -10670,11 +8426,7 @@ impl<'a> Parser<'a> { self.next_token(); Ok(vec![]) } else { - let cols = self.parse_comma_separated_with_trailing_commas( - Parser::parse_view_column, - self.dialect.supports_column_definition_trailing_commas(), - Self::is_reserved_for_column_alias, - )?; + let cols = self.parse_comma_separated(Parser::parse_view_column)?; self.expect_token(&Token::RParen)?; Ok(cols) } @@ -10685,8 +8437,18 @@ impl<'a> Parser<'a> { /// Parses a column definition within a view. fn parse_view_column(&mut self) -> Result { - let name = self.parse_identifier()?; - let options = self.parse_view_column_options()?; + let name = self.parse_identifier(false)?; + let options = if (dialect_of!(self is BigQueryDialect | GenericDialect) + && self.parse_keyword(Keyword::OPTIONS)) + || (dialect_of!(self is SnowflakeDialect | GenericDialect) + && self.parse_keyword(Keyword::COMMENT)) + { + self.prev_token(); + self.parse_optional_column_option()? + .map(|option| vec![option]) + } else { + None + }; let data_type = if dialect_of!(self is ClickHouseDialect) { Some(self.parse_data_type()?) } else { @@ -10699,72 +8461,18 @@ impl<'a> Parser<'a> { }) } - fn parse_view_column_options(&mut self) -> Result, ParserError> { - let mut options = Vec::new(); - loop { - let option = self.parse_optional_column_option()?; - if let Some(option) = option { - options.push(option); - } else { - break; - } - } - if options.is_empty() { - Ok(None) - } else if self.dialect.supports_space_separated_column_options() { - Ok(Some(ColumnOptions::SpaceSeparated(options))) - } else { - Ok(Some(ColumnOptions::CommaSeparated(options))) - } - } - - /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers. - /// For example: `(col1, "col 2", ...)` + /// Parse a parenthesized comma-separated list of unqualified, possibly quoted identifiers pub fn parse_parenthesized_column_list( &mut self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { - self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier()) - } - - /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary - /// expressions with ordering information (and an opclass in some dialects). - fn parse_parenthesized_index_column_list(&mut self) -> Result, ParserError> { - self.parse_parenthesized_column_list_inner(Mandatory, false, |p| { - p.parse_create_index_expr() - }) - } - - /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers. - /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)` - pub fn parse_parenthesized_qualified_column_list( - &mut self, - optional: IsOptional, - allow_empty: bool, - ) -> Result, ParserError> { - self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| { - p.parse_object_name(true) - }) - } - - /// Parses a parenthesized comma-separated list of columns using - /// the provided function to parse each element. 
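// A minimal sketch of the BigQuery hyphenated-identifier rules described in the
// comments above, exercised through the crate's public entry point. Assumes the
// usual `Parser::parse_sql` API and the `BigQueryDialect` unit struct; the SQL
// string is only an illustration.
#[cfg(test)]
mod bigquery_hyphenated_identifier_sketch {
    use crate::dialect::BigQueryDialect;
    use crate::parser::Parser;

    #[test]
    fn parses_hyphenated_project_name_in_from_clause() {
        // On BigQuery, a bare hyphenated project name such as
        // `bigquery-public-data` is legal in a FROM clause; the parser stitches
        // the `-`-separated segments back into a single identifier.
        let sql = "SELECT * FROM bigquery-public-data.samples.shakespeare";
        let statements = Parser::parse_sql(&BigQueryDialect {}, sql)
            .expect("hyphenated identifier should parse on BigQuery");
        assert_eq!(statements.len(), 1);
    }
}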
- fn parse_parenthesized_column_list_inner( - &mut self, - optional: IsOptional, - allow_empty: bool, - mut f: F, - ) -> Result, ParserError> - where - F: FnMut(&mut Parser) -> Result, - { if self.consume_token(&Token::LParen) { if allow_empty && self.peek_token().token == Token::RParen { self.next_token(); Ok(vec![]) } else { - let cols = self.parse_comma_separated(|p| f(p))?; + let cols = self.parse_comma_separated(|p| p.parse_identifier(false))?; self.expect_token(&Token::RParen)?; Ok(cols) } @@ -10775,21 +8483,6 @@ impl<'a> Parser<'a> { } } - /// Parses a parenthesized comma-separated list of table alias column definitions. - fn parse_table_alias_column_defs(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { - let cols = self.parse_comma_separated(|p| { - let name = p.parse_identifier()?; - let data_type = p.maybe_parse(|p| p.parse_data_type())?; - Ok(TableAliasColumnDef { name, data_type }) - })?; - self.expect_token(&Token::RParen)?; - Ok(cols) - } else { - Ok(vec![]) - } - } - pub fn parse_precision(&mut self) -> Result { self.expect_token(&Token::LParen)?; let n = self.parse_literal_uint()?; @@ -10815,7 +8508,7 @@ impl<'a> Parser<'a> { /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option), ParserError> { - self.expect_keyword_is(Keyword::DATETIME64)?; + self.expect_keyword(Keyword::DATETIME64)?; self.expect_token(&Token::LParen)?; let precision = self.parse_literal_uint()?; let time_zone = if self.consume_token(&Token::Comma) { @@ -10839,16 +8532,6 @@ impl<'a> Parser<'a> { } } - pub fn parse_optional_binary_length(&mut self) -> Result, ParserError> { - if self.consume_token(&Token::LParen) { - let binary_length = self.parse_binary_length()?; - self.expect_token(&Token::RParen)?; - Ok(Some(binary_length)) - } else { - Ok(None) - } - } - pub fn parse_character_length(&mut self) -> Result { if self.parse_keyword(Keyword::MAX) { return Ok(CharacterLength::Max); @@ -10864,14 +8547,6 @@ impl<'a> Parser<'a> { Ok(CharacterLength::IntegerLength { length, unit }) } - pub fn parse_binary_length(&mut self) -> Result { - if self.parse_keyword(Keyword::MAX) { - return Ok(BinaryLength::Max); - } - let length = self.parse_literal_uint()?; - Ok(BinaryLength::IntegerLength { length }) - } - pub fn parse_optional_precision_scale( &mut self, ) -> Result<(Option, Option), ParserError> { @@ -10948,13 +8623,6 @@ impl<'a> Parser<'a> { Ok(parent_type(inside_type.into())) } - /// Parse a DELETE statement, returning a `Box`ed SetExpr - /// - /// This is used to reduce the size of the stack frames in debug builds - fn parse_delete_setexpr_boxed(&mut self) -> Result, ParserError> { - Ok(Box::new(SetExpr::Delete(self.parse_delete()?))) - } - pub fn parse_delete(&mut self) -> Result { let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) { // `FROM` keyword is optional in BigQuery SQL. @@ -10963,7 +8631,7 @@ impl<'a> Parser<'a> { (vec![], false) } else { let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?; - self.expect_keyword_is(Keyword::FROM)?; + self.expect_keyword(Keyword::FROM)?; (tables, true) } } else { @@ -11045,7 +8713,6 @@ impl<'a> Parser<'a> { let mut analyze = false; let mut verbose = false; let mut query_plan = false; - let mut estimate = false; let mut format = None; let mut options = None; @@ -11058,8 +8725,6 @@ impl<'a> Parser<'a> { options = Some(self.parse_utility_options()?) 
} else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) { query_plan = true; - } else if self.parse_keyword(Keyword::ESTIMATE) { - estimate = true; } else { analyze = self.parse_keyword(Keyword::ANALYZE); verbose = self.parse_keyword(Keyword::VERBOSE); @@ -11077,7 +8742,6 @@ impl<'a> Parser<'a> { analyze, verbose, query_plan, - estimate, statement: Box::new(statement), format, options, @@ -11115,9 +8779,7 @@ impl<'a> Parser<'a> { pub fn parse_query(&mut self) -> Result, ParserError> { let _guard = self.recursion_counter.try_decrease()?; let with = if self.parse_keyword(Keyword::WITH) { - let with_token = self.get_current_token(); Some(With { - with_token: with_token.clone().into(), recursive: self.parse_keyword(Keyword::RECURSIVE), cte_tables: self.parse_comma_separated(Parser::parse_cte)?, }) @@ -11128,42 +8790,30 @@ impl<'a> Parser<'a> { Ok(Query { with, body: self.parse_insert_setexpr_boxed()?, + limit: None, + limit_by: vec![], order_by: None, - limit_clause: None, + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], } .into()) } else if self.parse_keyword(Keyword::UPDATE) { Ok(Query { with, body: self.parse_update_setexpr_boxed()?, + limit: None, + limit_by: vec![], order_by: None, - limit_clause: None, + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], - } - .into()) - } else if self.parse_keyword(Keyword::DELETE) { - Ok(Query { - with, - body: self.parse_delete_setexpr_boxed()?, - limit_clause: None, - order_by: None, - fetch: None, - locks: vec![], - for_clause: None, - settings: None, - format_clause: None, - pipe_operators: vec![], } .into()) } else { @@ -11171,7 +8821,40 @@ impl<'a> Parser<'a> { let order_by = self.parse_optional_order_by()?; - let limit_clause = self.parse_optional_limit_clause()?; + let mut limit = None; + let mut offset = None; + + for _x in 0..2 { + if limit.is_none() && self.parse_keyword(Keyword::LIMIT) { + limit = self.parse_limit()? + } + + if offset.is_none() && self.parse_keyword(Keyword::OFFSET) { + offset = Some(self.parse_offset()?) + } + + if self.dialect.supports_limit_comma() + && limit.is_some() + && offset.is_none() + && self.consume_token(&Token::Comma) + { + // MySQL style LIMIT x,y => LIMIT y OFFSET x. + // Check for more details. + offset = Some(Offset { + value: limit.unwrap(), + rows: OffsetRows::None, + }); + limit = Some(self.parse_expr()?); + } + } + + let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect) + && self.parse_keyword(Keyword::BY) + { + self.parse_comma_separated(Parser::parse_expr)? + } else { + vec![] + }; let settings = self.parse_settings()?; @@ -11197,262 +8880,38 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::NULL) { Some(FormatClause::Null) } else { - let ident = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; Some(FormatClause::Identifier(ident)) } } else { None }; - let pipe_operators = if self.dialect.supports_pipe_operator() { - self.parse_pipe_operators()? 
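// A minimal sketch of the MySQL-style `LIMIT x, y` handling above (the first
// number becomes the OFFSET, the second the LIMIT), assuming the public
// `Parser::parse_sql` API and the `MySqlDialect` unit struct.
#[cfg(test)]
mod limit_comma_sketch {
    use crate::dialect::MySqlDialect;
    use crate::parser::Parser;

    #[test]
    fn limit_comma_parses_as_offset_then_limit() {
        // `LIMIT 5, 10` reads as "skip 5 rows, return 10 rows" on MySQL.
        let statements = Parser::parse_sql(&MySqlDialect {}, "SELECT * FROM t LIMIT 5, 10")
            .expect("MySQL LIMIT x, y should parse");
        assert_eq!(statements.len(), 1);
    }
}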
- } else { - Vec::new() - }; - Ok(Query { with, body, order_by, - limit_clause, + limit, + limit_by, + offset, fetch, locks, for_clause, settings, format_clause, - pipe_operators, } .into()) } } - fn parse_pipe_operators(&mut self) -> Result, ParserError> { - let mut pipe_operators = Vec::new(); - - while self.consume_token(&Token::VerticalBarRightAngleBracket) { - let kw = self.expect_one_of_keywords(&[ - Keyword::SELECT, - Keyword::EXTEND, - Keyword::SET, - Keyword::DROP, - Keyword::AS, - Keyword::WHERE, - Keyword::LIMIT, - Keyword::AGGREGATE, - Keyword::ORDER, - Keyword::TABLESAMPLE, - Keyword::RENAME, - Keyword::UNION, - Keyword::INTERSECT, - Keyword::EXCEPT, - Keyword::CALL, - Keyword::PIVOT, - Keyword::UNPIVOT, - Keyword::JOIN, - Keyword::INNER, - Keyword::LEFT, - Keyword::RIGHT, - Keyword::FULL, - Keyword::CROSS, - ])?; - match kw { - Keyword::SELECT => { - let exprs = self.parse_comma_separated(Parser::parse_select_item)?; - pipe_operators.push(PipeOperator::Select { exprs }) - } - Keyword::EXTEND => { - let exprs = self.parse_comma_separated(Parser::parse_select_item)?; - pipe_operators.push(PipeOperator::Extend { exprs }) - } - Keyword::SET => { - let assignments = self.parse_comma_separated(Parser::parse_assignment)?; - pipe_operators.push(PipeOperator::Set { assignments }) - } - Keyword::DROP => { - let columns = self.parse_identifiers()?; - pipe_operators.push(PipeOperator::Drop { columns }) - } - Keyword::AS => { - let alias = self.parse_identifier()?; - pipe_operators.push(PipeOperator::As { alias }) - } - Keyword::WHERE => { - let expr = self.parse_expr()?; - pipe_operators.push(PipeOperator::Where { expr }) - } - Keyword::LIMIT => { - let expr = self.parse_expr()?; - let offset = if self.parse_keyword(Keyword::OFFSET) { - Some(self.parse_expr()?) - } else { - None - }; - pipe_operators.push(PipeOperator::Limit { expr, offset }) - } - Keyword::AGGREGATE => { - let full_table_exprs = if self.peek_keyword(Keyword::GROUP) { - vec![] - } else { - self.parse_comma_separated(|parser| { - parser.parse_expr_with_alias_and_order_by() - })? - }; - - let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { - self.parse_comma_separated(|parser| { - parser.parse_expr_with_alias_and_order_by() - })? 
- } else { - vec![] - }; - - pipe_operators.push(PipeOperator::Aggregate { - full_table_exprs, - group_by_expr, - }) - } - Keyword::ORDER => { - self.expect_one_of_keywords(&[Keyword::BY])?; - let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; - pipe_operators.push(PipeOperator::OrderBy { exprs }) - } - Keyword::TABLESAMPLE => { - let sample = self.parse_table_sample(TableSampleModifier::TableSample)?; - pipe_operators.push(PipeOperator::TableSample { sample }); - } - Keyword::RENAME => { - let mappings = - self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?; - pipe_operators.push(PipeOperator::Rename { mappings }); - } - Keyword::UNION => { - let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union)); - let queries = self.parse_pipe_operator_queries()?; - pipe_operators.push(PipeOperator::Union { - set_quantifier, - queries, - }); - } - Keyword::INTERSECT => { - let set_quantifier = - self.parse_distinct_required_set_quantifier("INTERSECT")?; - let queries = self.parse_pipe_operator_queries()?; - pipe_operators.push(PipeOperator::Intersect { - set_quantifier, - queries, - }); - } - Keyword::EXCEPT => { - let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?; - let queries = self.parse_pipe_operator_queries()?; - pipe_operators.push(PipeOperator::Except { - set_quantifier, - queries, - }); - } - Keyword::CALL => { - let function_name = self.parse_object_name(false)?; - let function_expr = self.parse_function(function_name)?; - if let Expr::Function(function) = function_expr { - let alias = self.parse_identifier_optional_alias()?; - pipe_operators.push(PipeOperator::Call { function, alias }); - } else { - return Err(ParserError::ParserError( - "Expected function call after CALL".to_string(), - )); - } - } - Keyword::PIVOT => { - self.expect_token(&Token::LParen)?; - let aggregate_functions = - self.parse_comma_separated(Self::parse_aliased_function_call)?; - self.expect_keyword_is(Keyword::FOR)?; - let value_column = self.parse_period_separated(|p| p.parse_identifier())?; - self.expect_keyword_is(Keyword::IN)?; - - self.expect_token(&Token::LParen)?; - let value_source = if self.parse_keyword(Keyword::ANY) { - let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { - self.parse_comma_separated(Parser::parse_order_by_expr)? - } else { - vec![] - }; - PivotValueSource::Any(order_by) - } else if self.peek_sub_query() { - PivotValueSource::Subquery(self.parse_query()?) 
- } else { - PivotValueSource::List( - self.parse_comma_separated(Self::parse_expr_with_alias)?, - ) - }; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::RParen)?; - - let alias = self.parse_identifier_optional_alias()?; - - pipe_operators.push(PipeOperator::Pivot { - aggregate_functions, - value_column, - value_source, - alias, - }); - } - Keyword::UNPIVOT => { - self.expect_token(&Token::LParen)?; - let value_column = self.parse_identifier()?; - self.expect_keyword(Keyword::FOR)?; - let name_column = self.parse_identifier()?; - self.expect_keyword(Keyword::IN)?; - - self.expect_token(&Token::LParen)?; - let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?; - self.expect_token(&Token::RParen)?; - - self.expect_token(&Token::RParen)?; - - let alias = self.parse_identifier_optional_alias()?; - - pipe_operators.push(PipeOperator::Unpivot { - value_column, - name_column, - unpivot_columns, - alias, - }); - } - Keyword::JOIN - | Keyword::INNER - | Keyword::LEFT - | Keyword::RIGHT - | Keyword::FULL - | Keyword::CROSS => { - self.prev_token(); - let mut joins = self.parse_joins()?; - if joins.len() != 1 { - return Err(ParserError::ParserError( - "Join pipe operator must have a single join".to_string(), - )); - } - let join = joins.swap_remove(0); - pipe_operators.push(PipeOperator::Join(join)) - } - unhandled => { - return Err(ParserError::ParserError(format!( - "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}" - ))) - } - } - } - Ok(pipe_operators) - } - fn parse_settings(&mut self) -> Result>, ParserError> { let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect) && self.parse_keyword(Keyword::SETTINGS) { let key_values = self.parse_comma_separated(|p| { - let key = p.parse_identifier()?; + let key = p.parse_identifier(false)?; p.expect_token(&Token::Eq)?; - let value = p.parse_expr()?; + let value = p.parse_value()?; Ok(Setting { key, value }) })?; Some(key_values) @@ -11511,7 +8970,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::ELEMENTS) { elements = true; } else if self.parse_keyword(Keyword::BINARY) { - self.expect_keyword_is(Keyword::BASE64)?; + self.expect_keyword(Keyword::BASE64)?; binary_base64 = true; } else if self.parse_keyword(Keyword::ROOT) { self.expect_token(&Token::LParen)?; @@ -11566,7 +9025,7 @@ impl<'a> Parser<'a> { /// Parse a CTE (`alias [( col1, col2, ... 
)] AS (subquery)`) pub fn parse_cte(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let mut cte = if self.parse_keyword(Keyword::AS) { let mut is_materialized = None; @@ -11578,10 +9037,8 @@ impl<'a> Parser<'a> { } } self.expect_token(&Token::LParen)?; - let query = self.parse_query()?; - let closing_paren_token = self.expect_token(&Token::RParen)?; - + self.expect_token(&Token::RParen)?; let alias = TableAlias { name, columns: vec![], @@ -11591,11 +9048,10 @@ impl<'a> Parser<'a> { query, from: None, materialized: is_materialized, - closing_paren_token: closing_paren_token.into(), } } else { - let columns = self.parse_table_alias_column_defs()?; - self.expect_keyword_is(Keyword::AS)?; + let columns = self.parse_parenthesized_column_list(Optional, false)?; + self.expect_keyword(Keyword::AS)?; let mut is_materialized = None; if dialect_of!(self is PostgreSqlDialect) { if self.parse_keyword(Keyword::MATERIALIZED) { @@ -11605,21 +9061,18 @@ impl<'a> Parser<'a> { } } self.expect_token(&Token::LParen)?; - let query = self.parse_query()?; - let closing_paren_token = self.expect_token(&Token::RParen)?; - + self.expect_token(&Token::RParen)?; let alias = TableAlias { name, columns }; Cte { alias, query, from: None, materialized: is_materialized, - closing_paren_token: closing_paren_token.into(), } }; if self.parse_keyword(Keyword::FROM) { - cte.from = Some(self.parse_identifier()?); + cte.from = Some(self.parse_identifier(false)?); } Ok(cte) } @@ -11635,9 +9088,7 @@ impl<'a> Parser<'a> { pub fn parse_query_body(&mut self, precedence: u8) -> Result, ParserError> { // We parse the expression using a Pratt parser, as in `parse_expr()`. // Start by parsing a restricted SELECT or a `(subquery)`: - let expr = if self.peek_keyword(Keyword::SELECT) - || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select()) - { + let expr = if self.parse_keyword(Keyword::SELECT) { SetExpr::Select(self.parse_select().map(Box::new)?) 
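// A minimal sketch of the CTE form parsed above (`alias [(col1, ...)] AS (subquery)`),
// assuming the public `Parser::parse_sql` API and the `GenericDialect` unit struct.
#[cfg(test)]
mod cte_sketch {
    use crate::dialect::GenericDialect;
    use crate::parser::Parser;

    #[test]
    fn named_cte_with_column_list_parses() {
        // One CTE with an explicit column list, then a SELECT over it.
        let sql = "WITH t (x) AS (SELECT 1) SELECT x FROM t";
        let statements = Parser::parse_sql(&GenericDialect {}, sql)
            .expect("CTE should parse");
        assert_eq!(statements.len(), 1);
    }
}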
} else if self.consume_token(&Token::LParen) { // CTEs are not allowed here, but the parser currently accepts them @@ -11672,9 +9123,7 @@ impl<'a> Parser<'a> { let op = self.parse_set_operator(&self.peek_token().token); let next_precedence = match op { // UNION and EXCEPT have the same binding power and evaluate left-to-right - Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => { - 10 - } + Some(SetOperator::Union) | Some(SetOperator::Except) => 10, // INTERSECT has higher precedence than UNION/EXCEPT Some(SetOperator::Intersect) => 20, // Unexpected token or EOF => stop parsing the query body @@ -11701,19 +9150,13 @@ impl<'a> Parser<'a> { Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union), Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except), Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect), - Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus), _ => None, } } pub fn parse_set_quantifier(&mut self, op: &Option) -> SetQuantifier { match op { - Some( - SetOperator::Except - | SetOperator::Intersect - | SetOperator::Union - | SetOperator::Minus, - ) => { + Some(SetOperator::Except | SetOperator::Intersect | SetOperator::Union) => { if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) { SetQuantifier::DistinctByName } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) { @@ -11734,44 +9177,21 @@ impl<'a> Parser<'a> { } } - /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`) + /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`), + /// assuming the initial `SELECT` was already consumed pub fn parse_select(&mut self) -> Result { - let mut from_first = None; - - if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) { - let from_token = self.expect_keyword(Keyword::FROM)?; - let from = self.parse_table_with_joins()?; - if !self.peek_keyword(Keyword::SELECT) { - return Ok(Select { - select_token: AttachedToken(from_token), - distinct: None, - top: None, - top_before_distinct: false, - projection: vec![], - exclude: None, - into: None, - from, - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - window_before_qualify: false, - qualify: None, - value_table_mode: None, - connect_by: None, - flavor: SelectFlavor::FromFirstNoSelect, - }); - } - from_first = Some(from); - } - - let select_token = self.expect_keyword(Keyword::SELECT)?; - let value_table_mode = self.parse_value_table_mode()?; + let value_table_mode = + if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) { + if self.parse_keyword(Keyword::VALUE) { + Some(ValueTableMode::AsValue) + } else if self.parse_keyword(Keyword::STRUCT) { + Some(ValueTableMode::AsStruct) + } else { + self.expected("VALUE or STRUCT", self.peek_token())? + } + } else { + None + }; let mut top_before_distinct = false; let mut top = None; @@ -11784,21 +9204,21 @@ impl<'a> Parser<'a> { top = Some(self.parse_top()?); } - let projection = - if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) { - vec![] - } else { - self.parse_projection()? - }; - - let exclude = if self.dialect.supports_select_exclude() { - self.parse_optional_select_item_exclude()? 
- } else { - None - }; + let projection = self.parse_projection()?; let into = if self.parse_keyword(Keyword::INTO) { - Some(self.parse_select_into()?) + let temporary = self + .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) + .is_some(); + let unlogged = self.parse_keyword(Keyword::UNLOGGED); + let table = self.parse_keyword(Keyword::TABLE); + let name = self.parse_object_name(false)?; + Some(SelectInto { + temporary, + unlogged, + table, + name, + }) } else { None }; @@ -11808,12 +9228,10 @@ impl<'a> Parser<'a> { // otherwise they may be parsed as an alias as part of the `projection` // or `from`. - let (from, from_first) = if let Some(from) = from_first.take() { - (from, true) - } else if self.parse_keyword(Keyword::FROM) { - (self.parse_table_with_joins()?, false) + let from = if self.parse_keyword(Keyword::FROM) { + self.parse_comma_separated(Parser::parse_table_and_joins)? } else { - (vec![], false) + vec![] }; let mut lateral_views = vec![]; @@ -11878,7 +9296,7 @@ impl<'a> Parser<'a> { }; let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) { - self.parse_comma_separated(Parser::parse_order_by_expr)? + self.parse_comma_separated(Parser::parse_expr)? } else { vec![] }; @@ -11925,12 +9343,10 @@ impl<'a> Parser<'a> { }; Ok(Select { - select_token: AttachedToken(select_token), distinct, top, top_before_distinct, projection, - exclude, into, from, lateral_views, @@ -11946,40 +9362,9 @@ impl<'a> Parser<'a> { qualify, value_table_mode, connect_by, - flavor: if from_first { - SelectFlavor::FromFirst - } else { - SelectFlavor::Standard - }, }) } - fn parse_value_table_mode(&mut self) -> Result, ParserError> { - if !dialect_of!(self is BigQueryDialect) { - return Ok(None); - } - - let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) { - Some(ValueTableMode::DistinctAsValue) - } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) { - Some(ValueTableMode::DistinctAsStruct) - } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE]) - || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE]) - { - Some(ValueTableMode::AsValue) - } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT]) - || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT]) - { - Some(ValueTableMode::AsStruct) - } else if self.parse_keyword(Keyword::AS) { - self.expected("VALUE or STRUCT", self.peek_token())? - } else { - None - }; - - Ok(mode) - } - /// Invoke `f` after first setting the parser's `ParserState` to `state`. /// /// Upon return, restores the parser's state to what it started at. @@ -12063,326 +9448,164 @@ impl<'a> Parser<'a> { } /// Parse a `SET ROLE` statement. Expects SET to be consumed already. - fn parse_set_role( - &mut self, - modifier: Option, - ) -> Result { - self.expect_keyword_is(Keyword::ROLE)?; + fn parse_set_role(&mut self, modifier: Option) -> Result { + self.expect_keyword(Keyword::ROLE)?; + let context_modifier = match modifier { + Some(Keyword::LOCAL) => ContextModifier::Local, + Some(Keyword::SESSION) => ContextModifier::Session, + _ => ContextModifier::None, + }; let role_name = if self.parse_keyword(Keyword::NONE) { None } else { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) 
}; - Ok(Statement::Set(Set::SetRole { - context_modifier: modifier, + Ok(Statement::SetRole { + context_modifier, role_name, - })) + }) } - fn parse_set_values( - &mut self, - parenthesized_assignment: bool, - ) -> Result, ParserError> { - let mut values = vec![]; - - if parenthesized_assignment { - self.expect_token(&Token::LParen)?; - } - - loop { - let value = if let Some(expr) = self.try_parse_expr_sub_query()? { - expr - } else if let Ok(expr) = self.parse_expr() { - expr - } else { - self.expected("variable value", self.peek_token())? - }; - - values.push(value); - if self.consume_token(&Token::Comma) { - continue; - } - - if parenthesized_assignment { - self.expect_token(&Token::RParen)?; - } - return Ok(values); - } - } - - fn parse_context_modifier(&mut self) -> Option { + pub fn parse_set(&mut self) -> Result { let modifier = - self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?; - - Self::keyword_to_modifier(modifier) - } - - /// Parse a single SET statement assignment `var = expr`. - fn parse_set_assignment(&mut self) -> Result { - let scope = self.parse_context_modifier(); - - let name = if self.dialect.supports_parenthesized_set_variables() - && self.consume_token(&Token::LParen) - { - // Parenthesized assignments are handled in the `parse_set` function after - // trying to parse list of assignments using this function. - // If a dialect supports both, and we find a LParen, we early exit from this function. - self.expected("Unparenthesized assignment", self.peek_token())? - } else { - self.parse_object_name(false)? - }; - - if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) { - return self.expected("assignment operator", self.peek_token()); - } - - let value = self.parse_expr()?; - - Ok(SetAssignment { scope, name, value }) - } - - fn parse_set(&mut self) -> Result { - let hivevar = self.parse_keyword(Keyword::HIVEVAR); - - // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both - let scope = if !hivevar { - self.parse_context_modifier() - } else { - None - }; - - if hivevar { + self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]); + if let Some(Keyword::HIVEVAR) = modifier { self.expect_token(&Token::Colon)?; - } - - if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? { + } else if let Some(set_role_stmt) = + self.maybe_parse(|parser| parser.parse_set_role(modifier))? + { return Ok(set_role_stmt); } - // Handle special cases first - if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) - || self.parse_keyword(Keyword::TIMEZONE) + let variables = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) { + OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()])) + } else if self.dialect.supports_parenthesized_set_variables() + && self.consume_token(&Token::LParen) + { + let variables = OneOrManyWithParens::Many( + self.parse_comma_separated(|parser: &mut Parser<'a>| { + parser.parse_identifier(false) + })? + .into_iter() + .map(|ident| ObjectName(vec![ident])) + .collect(), + ); + self.expect_token(&Token::RParen)?; + variables + } else { + OneOrManyWithParens::One(self.parse_object_name(false)?) 
+ }; + + if matches!(&variables, OneOrManyWithParens::One(variable) if variable.to_string().eq_ignore_ascii_case("NAMES") + && dialect_of!(self is MySqlDialect | GenericDialect)) { - if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { - return Ok(Set::SingleAssignment { - scope, - hivevar, - variable: ObjectName::from(vec!["TIMEZONE".into()]), - values: self.parse_set_values(false)?, - } - .into()); - } else { - // A shorthand alias for SET TIME ZONE that doesn't require - // the assignment operator. It's originally PostgreSQL specific, - // but we allow it for all the dialects - return Ok(Set::SetTimeZone { - local: scope == Some(ContextModifier::Local), - value: self.parse_expr()?, - } - .into()); - } - } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) { if self.parse_keyword(Keyword::DEFAULT) { - return Ok(Set::SetNamesDefault {}.into()); + return Ok(Statement::SetNamesDefault {}); } - let charset_name = self.parse_identifier()?; + + let charset_name = self.parse_literal_string()?; let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() { Some(self.parse_literal_string()?) } else { None }; - return Ok(Set::SetNames { + return Ok(Statement::SetNames { charset_name, collation_name, + }); + } + + let parenthesized_assignment = matches!(&variables, OneOrManyWithParens::Many(_)); + + if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + if parenthesized_assignment { + self.expect_token(&Token::LParen)?; } - .into()); - } else if self.parse_keyword(Keyword::CHARACTERISTICS) { + + let mut values = vec![]; + loop { + let value = if let Some(expr) = self.try_parse_expr_sub_query()? { + expr + } else if let Ok(expr) = self.parse_expr() { + expr + } else { + self.expected("variable value", self.peek_token())? + }; + + values.push(value); + if self.consume_token(&Token::Comma) { + continue; + } + + if parenthesized_assignment { + self.expect_token(&Token::RParen)?; + } + return Ok(Statement::SetVariable { + local: modifier == Some(Keyword::LOCAL), + hivevar: Some(Keyword::HIVEVAR) == modifier, + variables, + value: values, + }); + } + } + + let OneOrManyWithParens::One(variable) = variables else { + return self.expected("set variable", self.peek_token()); + }; + + if variable.to_string().eq_ignore_ascii_case("TIMEZONE") { + // for some db (e.g. 
postgresql), SET TIME ZONE is an alias for SET TIMEZONE [TO|=] + match self.parse_expr() { + Ok(expr) => Ok(Statement::SetTimeZone { + local: modifier == Some(Keyword::LOCAL), + value: expr, + }), + _ => self.expected("timezone value", self.peek_token())?, + } + } else if variable.to_string() == "CHARACTERISTICS" { self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?; - return Ok(Set::SetTransaction { + Ok(Statement::SetTransaction { modes: self.parse_transaction_modes()?, snapshot: None, session: true, - } - .into()); - } else if self.parse_keyword(Keyword::TRANSACTION) { + }) + } else if variable.to_string() == "TRANSACTION" && modifier.is_none() { if self.parse_keyword(Keyword::SNAPSHOT) { - let snapshot_id = self.parse_value()?.value; - return Ok(Set::SetTransaction { + let snapshot_id = self.parse_value()?; + return Ok(Statement::SetTransaction { modes: vec![], snapshot: Some(snapshot_id), session: false, - } - .into()); + }); } - return Ok(Set::SetTransaction { + Ok(Statement::SetTransaction { modes: self.parse_transaction_modes()?, snapshot: None, session: false, - } - .into()); - } - - if self.dialect.supports_comma_separated_set_assignments() { - if scope.is_some() { - self.prev_token(); - } - - if let Some(assignments) = self - .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))? - { - return if assignments.len() > 1 { - Ok(Set::MultipleAssignments { assignments }.into()) - } else { - let SetAssignment { scope, name, value } = - assignments.into_iter().next().ok_or_else(|| { - ParserError::ParserError("Expected at least one assignment".to_string()) - })?; - - Ok(Set::SingleAssignment { - scope, - hivevar, - variable: name, - values: vec![value], - } - .into()) - }; - } - } - - let variables = if self.dialect.supports_parenthesized_set_variables() - && self.consume_token(&Token::LParen) - { - let vars = OneOrManyWithParens::Many( - self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())? - .into_iter() - .map(|ident| ObjectName::from(vec![ident])) - .collect(), - ); - self.expect_token(&Token::RParen)?; - vars + }) } else { - OneOrManyWithParens::One(self.parse_object_name(false)?) 
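// A minimal sketch of the `SET TIME ZONE` handling above: both code paths accept
// the PostgreSQL-style shorthand without an explicit `=` or `TO`. Assumes the
// public `Parser::parse_sql` API and the `PostgreSqlDialect` unit struct.
#[cfg(test)]
mod set_time_zone_sketch {
    use crate::dialect::PostgreSqlDialect;
    use crate::parser::Parser;

    #[test]
    fn set_time_zone_shorthand_parses() {
        // `SET TIME ZONE 'UTC'` is parsed as a time-zone assignment even though
        // no assignment operator is present.
        let statements = Parser::parse_sql(&PostgreSqlDialect {}, "SET TIME ZONE 'UTC'")
            .expect("SET TIME ZONE should parse");
        assert_eq!(statements.len(), 1);
    }
}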
- }; - - if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { - let stmt = match variables { - OneOrManyWithParens::One(var) => Set::SingleAssignment { - scope, - hivevar, - variable: var, - values: self.parse_set_values(false)?, - }, - OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments { - variables: vars, - values: self.parse_set_values(true)?, - }, - }; - - return Ok(stmt.into()); - } - - if self.dialect.supports_set_stmt_without_operator() { - self.prev_token(); - return self.parse_set_session_params(); - }; - - self.expected("equals sign or TO", self.peek_token()) - } - - pub fn parse_set_session_params(&mut self) -> Result { - if self.parse_keyword(Keyword::STATISTICS) { - let topic = match self.parse_one_of_keywords(&[ - Keyword::IO, - Keyword::PROFILE, - Keyword::TIME, - Keyword::XML, - ]) { - Some(Keyword::IO) => SessionParamStatsTopic::IO, - Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile, - Some(Keyword::TIME) => SessionParamStatsTopic::Time, - Some(Keyword::XML) => SessionParamStatsTopic::Xml, - _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()), - }; - let value = self.parse_session_param_value()?; - Ok( - Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics { - topic, - value, - })) - .into(), - ) - } else if self.parse_keyword(Keyword::IDENTITY_INSERT) { - let obj = self.parse_object_name(false)?; - let value = self.parse_session_param_value()?; - Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert( - SetSessionParamIdentityInsert { obj, value }, - )) - .into()) - } else if self.parse_keyword(Keyword::OFFSETS) { - let keywords = self.parse_comma_separated(|parser| { - let next_token = parser.next_token(); - match &next_token.token { - Token::Word(w) => Ok(w.to_string()), - _ => parser.expected("SQL keyword", next_token), - } - })?; - let value = self.parse_session_param_value()?; - Ok( - Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets { - keywords, - value, - })) - .into(), - ) - } else { - let names = self.parse_comma_separated(|parser| { - let next_token = parser.next_token(); - match next_token.token { - Token::Word(w) => Ok(w.to_string()), - _ => parser.expected("Session param name", next_token), - } - })?; - let value = self.parse_expr()?.to_string(); - Ok( - Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric { - names, - value, - })) - .into(), - ) - } - } - - fn parse_session_param_value(&mut self) -> Result { - if self.parse_keyword(Keyword::ON) { - Ok(SessionParamValue::On) - } else if self.parse_keyword(Keyword::OFF) { - Ok(SessionParamValue::Off) - } else { - self.expected("ON or OFF", self.peek_token()) + self.expected("equals sign or TO", self.peek_token()) } } pub fn parse_show(&mut self) -> Result { - let terse = self.parse_keyword(Keyword::TERSE); let extended = self.parse_keyword(Keyword::EXTENDED); let full = self.parse_keyword(Keyword::FULL); let session = self.parse_keyword(Keyword::SESSION); let global = self.parse_keyword(Keyword::GLOBAL); - let external = self.parse_keyword(Keyword::EXTERNAL); if self .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS]) .is_some() { Ok(self.parse_show_columns(extended, full)?) } else if self.parse_keyword(Keyword::TABLES) { - Ok(self.parse_show_tables(terse, extended, full, external)?) + Ok(self.parse_show_tables(extended, full)?) } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) { - Ok(self.parse_show_views(terse, true)?) 
+ Ok(self.parse_show_views(true)?) } else if self.parse_keyword(Keyword::VIEWS) { - Ok(self.parse_show_views(terse, false)?) + Ok(self.parse_show_views(false)?) } else if self.parse_keyword(Keyword::FUNCTIONS) { Ok(self.parse_show_functions()?) } else if extended || full { @@ -12410,9 +9633,9 @@ impl<'a> Parser<'a> { global, }) } else if self.parse_keyword(Keyword::DATABASES) { - self.parse_show_databases(terse) + self.parse_show_databases() } else if self.parse_keyword(Keyword::SCHEMAS) { - self.parse_show_schemas(terse) + self.parse_show_schemas() } else { Ok(Statement::ShowVariable { variable: self.parse_identifiers()?, @@ -12420,23 +9643,15 @@ impl<'a> Parser<'a> { } } - fn parse_show_databases(&mut self, terse: bool) -> Result { - let history = self.parse_keyword(Keyword::HISTORY); - let show_options = self.parse_show_stmt_options()?; + fn parse_show_databases(&mut self) -> Result { Ok(Statement::ShowDatabases { - terse, - history, - show_options, + filter: self.parse_show_statement_filter()?, }) } - fn parse_show_schemas(&mut self, terse: bool) -> Result { - let history = self.parse_keyword(Keyword::HISTORY); - let show_options = self.parse_show_stmt_options()?; + fn parse_show_schemas(&mut self) -> Result { Ok(Statement::ShowSchemas { - terse, - history, - show_options, + filter: self.parse_show_statement_filter()?, }) } @@ -12470,43 +9685,58 @@ impl<'a> Parser<'a> { extended: bool, full: bool, ) -> Result { - let show_options = self.parse_show_stmt_options()?; + self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?; + let object_name = self.parse_object_name(false)?; + let table_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { + Some(_) => { + let db_name = vec![self.parse_identifier(false)?]; + let ObjectName(table_name) = object_name; + let object_name = db_name.into_iter().chain(table_name).collect(); + ObjectName(object_name) + } + None => object_name, + }; + let filter = self.parse_show_statement_filter()?; Ok(Statement::ShowColumns { extended, full, - show_options, + table_name, + filter, }) } - fn parse_show_tables( + pub fn parse_show_tables( &mut self, - terse: bool, extended: bool, full: bool, - external: bool, ) -> Result { - let history = !external && self.parse_keyword(Keyword::HISTORY); - let show_options = self.parse_show_stmt_options()?; + let (clause, db_name) = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { + Some(Keyword::FROM) => (Some(ShowClause::FROM), Some(self.parse_identifier(false)?)), + Some(Keyword::IN) => (Some(ShowClause::IN), Some(self.parse_identifier(false)?)), + _ => (None, None), + }; + let filter = self.parse_show_statement_filter()?; Ok(Statement::ShowTables { - terse, - history, extended, full, - external, - show_options, + clause, + db_name, + filter, }) } - fn parse_show_views( - &mut self, - terse: bool, - materialized: bool, - ) -> Result { - let show_options = self.parse_show_stmt_options()?; + fn parse_show_views(&mut self, materialized: bool) -> Result { + let (clause, db_name) = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { + Some(Keyword::FROM) => (Some(ShowClause::FROM), Some(self.parse_identifier(false)?)), + Some(Keyword::IN) => (Some(ShowClause::IN), Some(self.parse_identifier(false)?)), + _ => (None, None), + }; + let filter = self.parse_show_statement_filter()?; Ok(Statement::ShowViews { materialized, - terse, - show_options, + clause, + db_name, + filter, }) } @@ -12554,56 +9784,28 @@ impl<'a> Parser<'a> { } else if dialect_of!(self is DatabricksDialect) { 
self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA]) } else if dialect_of!(self is SnowflakeDialect) { - self.parse_one_of_keywords(&[ - Keyword::DATABASE, - Keyword::SCHEMA, - Keyword::WAREHOUSE, - Keyword::ROLE, - Keyword::SECONDARY, - ]) + self.parse_one_of_keywords(&[Keyword::DATABASE, Keyword::SCHEMA, Keyword::WAREHOUSE]) } else { None // No specific keywords for other dialects, including GenericDialect }; - let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) { - self.parse_secondary_roles()? - } else { - let obj_name = self.parse_object_name(false)?; - match parsed_keyword { - Some(Keyword::CATALOG) => Use::Catalog(obj_name), - Some(Keyword::DATABASE) => Use::Database(obj_name), - Some(Keyword::SCHEMA) => Use::Schema(obj_name), - Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name), - Some(Keyword::ROLE) => Use::Role(obj_name), - _ => Use::Object(obj_name), - } + let obj_name = self.parse_object_name(false)?; + let result = match parsed_keyword { + Some(Keyword::CATALOG) => Use::Catalog(obj_name), + Some(Keyword::DATABASE) => Use::Database(obj_name), + Some(Keyword::SCHEMA) => Use::Schema(obj_name), + Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name), + _ => Use::Object(obj_name), }; Ok(Statement::Use(result)) } - fn parse_secondary_roles(&mut self) -> Result { - self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?; - if self.parse_keyword(Keyword::NONE) { - Ok(Use::SecondaryRoles(SecondaryRoles::None)) - } else if self.parse_keyword(Keyword::ALL) { - Ok(Use::SecondaryRoles(SecondaryRoles::All)) - } else { - let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?; - Ok(Use::SecondaryRoles(SecondaryRoles::List(roles))) - } - } - pub fn parse_table_and_joins(&mut self) -> Result { let relation = self.parse_table_factor()?; // Note that for keywords to be properly handled here, they need to be // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as // a table alias. - let joins = self.parse_joins()?; - Ok(TableWithJoins { relation, joins }) - } - - fn parse_joins(&mut self) -> Result, ParserError> { let mut joins = vec![]; loop { let global = self.parse_keyword(Keyword::GLOBAL); @@ -12623,16 +9825,16 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::OUTER) { // MSSQL extension, similar to LEFT JOIN LATERAL .. 
ON 1=1 - self.expect_keyword_is(Keyword::APPLY)?; + self.expect_keyword(Keyword::APPLY)?; Join { relation: self.parse_table_factor()?, global, join_operator: JoinOperator::OuterApply, } } else if self.parse_keyword(Keyword::ASOF) { - self.expect_keyword_is(Keyword::JOIN)?; + self.expect_keyword(Keyword::JOIN)?; let relation = self.parse_table_factor()?; - self.expect_keyword_is(Keyword::MATCH_CONDITION)?; + self.expect_keyword(Keyword::MATCH_CONDITION)?; let match_condition = self.parse_parenthesized(Self::parse_expr)?; Join { relation, @@ -12652,13 +9854,9 @@ impl<'a> Parser<'a> { let join_operator_type = match peek_keyword { Keyword::INNER | Keyword::JOIN => { - let inner = self.parse_keyword(Keyword::INNER); // [ INNER ] - self.expect_keyword_is(Keyword::JOIN)?; - if inner { - JoinOperator::Inner - } else { - JoinOperator::Join - } + let _ = self.parse_keyword(Keyword::INNER); // [ INNER ] + self.expect_keyword(Keyword::JOIN)?; + JoinOperator::Inner } kw @ Keyword::LEFT | kw @ Keyword::RIGHT => { let _ = self.next_token(); // consume LEFT/RIGHT @@ -12671,7 +9869,7 @@ impl<'a> Parser<'a> { ]); match join_type { Some(Keyword::OUTER) => { - self.expect_keyword_is(Keyword::JOIN)?; + self.expect_keyword(Keyword::JOIN)?; if is_left { JoinOperator::LeftOuter } else { @@ -12679,7 +9877,7 @@ impl<'a> Parser<'a> { } } Some(Keyword::SEMI) => { - self.expect_keyword_is(Keyword::JOIN)?; + self.expect_keyword(Keyword::JOIN)?; if is_left { JoinOperator::LeftSemi } else { @@ -12687,7 +9885,7 @@ impl<'a> Parser<'a> { } } Some(Keyword::ANTI) => { - self.expect_keyword_is(Keyword::JOIN)?; + self.expect_keyword(Keyword::JOIN)?; if is_left { JoinOperator::LeftAnti } else { @@ -12696,9 +9894,9 @@ impl<'a> Parser<'a> { } Some(Keyword::JOIN) => { if is_left { - JoinOperator::Left + JoinOperator::LeftOuter } else { - JoinOperator::Right + JoinOperator::RightOuter } } _ => { @@ -12708,48 +9906,21 @@ impl<'a> Parser<'a> { } } } - Keyword::ANTI => { - let _ = self.next_token(); // consume ANTI - self.expect_keyword_is(Keyword::JOIN)?; - JoinOperator::Anti - } - Keyword::SEMI => { - let _ = self.next_token(); // consume SEMI - self.expect_keyword_is(Keyword::JOIN)?; - JoinOperator::Semi - } Keyword::FULL => { let _ = self.next_token(); // consume FULL let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ] - self.expect_keyword_is(Keyword::JOIN)?; + self.expect_keyword(Keyword::JOIN)?; JoinOperator::FullOuter } Keyword::OUTER => { return self.expected("LEFT, RIGHT, or FULL", self.peek_token()); } - Keyword::STRAIGHT_JOIN => { - let _ = self.next_token(); // consume STRAIGHT_JOIN - JoinOperator::StraightJoin - } _ if natural => { return self.expected("a join type after NATURAL", self.peek_token()); } _ => break, }; - let mut relation = self.parse_table_factor()?; - - if !self - .dialect - .supports_left_associative_joins_without_parens() - && self.peek_parens_less_nested_join() - { - let joins = self.parse_joins()?; - relation = TableFactor::NestedJoin { - table_with_joins: Box::new(TableWithJoins { relation, joins }), - alias: None, - }; - } - + let relation = self.parse_table_factor()?; let join_constraint = self.parse_join_constraint(natural)?; Join { relation, @@ -12759,21 +9930,7 @@ impl<'a> Parser<'a> { }; joins.push(join); } - Ok(joins) - } - - fn peek_parens_less_nested_join(&self) -> bool { - matches!( - self.peek_token_ref().token, - Token::Word(Word { - keyword: Keyword::JOIN - | Keyword::INNER - | Keyword::LEFT - | Keyword::RIGHT - | Keyword::FULL, - .. 
- }) - ) + Ok(TableWithJoins { relation, joins }) } /// A table name or a parenthesized subquery, followed by optional `[AS] alias` @@ -12786,7 +9943,7 @@ impl<'a> Parser<'a> { let name = self.parse_object_name(false)?; self.expect_token(&Token::LParen)?; let args = self.parse_optional_args()?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::Function { lateral: true, name, @@ -12799,7 +9956,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::TableFunction { expr, alias }) } else if self.consume_token(&Token::LParen) { // A left paren introduces either a derived table (i.e., a subquery) @@ -12848,7 +10005,7 @@ impl<'a> Parser<'a> { #[allow(clippy::if_same_then_else)] if !table_and_joins.joins.is_empty() { self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), alias, @@ -12861,7 +10018,7 @@ impl<'a> Parser<'a> { // (B): `table_and_joins` (what we found inside the parentheses) // is a nested join `(foo JOIN bar)`, not followed by other joins. self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), alias, @@ -12875,7 +10032,9 @@ impl<'a> Parser<'a> { // [AS alias])`) as well. self.expect_token(&Token::RParen)?; - if let Some(outer_alias) = self.maybe_parse_table_alias()? { + if let Some(outer_alias) = + self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)? + { // Snowflake also allows specifying an alias *after* parens // e.g. `FROM (mytable) AS alias` match &mut table_and_joins.relation { @@ -12884,8 +10043,6 @@ impl<'a> Parser<'a> { | TableFactor::Function { alias, .. } | TableFactor::UNNEST { alias, .. } | TableFactor::JsonTable { alias, .. } - | TableFactor::XmlTable { alias, .. } - | TableFactor::OpenJsonTable { alias, .. } | TableFactor::TableFunction { alias, .. } | TableFactor::Pivot { alias, .. } | TableFactor::Unpivot { alias, .. } @@ -12923,26 +10080,27 @@ impl<'a> Parser<'a> { ] ) { - self.expect_keyword_is(Keyword::VALUES)?; + self.expect_keyword(Keyword::VALUES)?; // Snowflake and Databricks allow syntax like below: // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2) // where there are no parentheses around the VALUES clause. 
let values = SetExpr::Values(self.parse_values(false)?); - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::Derived { lateral: false, subquery: Box::new(Query { with: None, body: Box::new(values), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }), alias, }) @@ -12954,7 +10112,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::RParen)?; let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]); - let alias = match self.maybe_parse_table_alias() { + let alias = match self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS) { Ok(Some(alias)) => Some(alias), Ok(None) => None, Err(e) => return Err(e), @@ -12985,33 +10143,22 @@ impl<'a> Parser<'a> { } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) { let json_expr = self.parse_expr()?; self.expect_token(&Token::Comma)?; - let json_path = self.parse_value()?.value; - self.expect_keyword_is(Keyword::COLUMNS)?; + let json_path = self.parse_value()?; + self.expect_keyword(Keyword::COLUMNS)?; self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?; self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::JsonTable { json_expr, json_path, columns, alias, }) - } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) { - self.prev_token(); - self.parse_open_json_table_factor() - } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) { - self.prev_token(); - self.parse_xml_table_factor() } else { let name = self.parse_object_name(true)?; - let json_path = match self.peek_token().token { - Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?), - _ => None, - }; - let partitions: Vec = if dialect_of!(self is MySqlDialect | GenericDialect) && self.parse_keyword(Keyword::PARTITION) { @@ -13021,7 +10168,7 @@ impl<'a> Parser<'a> { }; // Parse potential version qualifier - let version = self.maybe_parse_table_version()?; + let version = self.parse_table_version()?; // Postgres, MSSQL, ClickHouse: table-valued functions: let args = if self.consume_token(&Token::LParen) { @@ -13032,22 +10179,7 @@ impl<'a> Parser<'a> { let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]); - let mut sample = None; - if self.dialect.supports_table_sample_before_alias() { - if let Some(parsed_sample) = self.maybe_parse_table_sample()? { - sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample)); - } - } - - let alias = self.maybe_parse_table_alias()?; - - // MYSQL-specific table hints: - let index_hints = if self.dialect.supports_table_hints() { - self.maybe_parse(|p| p.parse_table_index_hints())? - .unwrap_or(vec![]) - } else { - vec![] - }; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; // MSSQL-specific table hints: let mut with_hints = vec![]; @@ -13061,12 +10193,6 @@ impl<'a> Parser<'a> { } }; - if !self.dialect.supports_table_sample_before_alias() { - if let Some(parsed_sample) = self.maybe_parse_table_sample()? 
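// A minimal sketch of the unparenthesized `FROM VALUES` form mentioned in the
// comment above (the comment names Snowflake and Databricks as the dialects that
// accept it), assuming the public `Parser::parse_sql` API and the
// `SnowflakeDialect` unit struct.
#[cfg(test)]
mod from_values_without_parens_sketch {
    use crate::dialect::SnowflakeDialect;
    use crate::parser::Parser;

    #[test]
    fn values_clause_without_parentheses_parses() {
        // The VALUES rows follow FROM directly, with the alias and column list
        // appearing after the last row.
        let sql = "SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)";
        let statements = Parser::parse_sql(&SnowflakeDialect {}, sql)
            .expect("unparenthesized VALUES should parse");
        assert_eq!(statements.len(), 1);
    }
}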
{ - sample = Some(TableSampleKind::AfterTableAlias(parsed_sample)); - } - } - let mut table = TableFactor::Table { name, alias, @@ -13075,9 +10201,6 @@ impl<'a> Parser<'a> { version, partitions, with_ordinality, - json_path, - sample, - index_hints, }; while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) { @@ -13098,243 +10221,6 @@ impl<'a> Parser<'a> { } } - fn maybe_parse_table_sample(&mut self) -> Result>, ParserError> { - let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) { - TableSampleModifier::TableSample - } else if self.parse_keyword(Keyword::SAMPLE) { - TableSampleModifier::Sample - } else { - return Ok(None); - }; - self.parse_table_sample(modifier).map(Some) - } - - fn parse_table_sample( - &mut self, - modifier: TableSampleModifier, - ) -> Result, ParserError> { - let name = match self.parse_one_of_keywords(&[ - Keyword::BERNOULLI, - Keyword::ROW, - Keyword::SYSTEM, - Keyword::BLOCK, - ]) { - Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli), - Some(Keyword::ROW) => Some(TableSampleMethod::Row), - Some(Keyword::SYSTEM) => Some(TableSampleMethod::System), - Some(Keyword::BLOCK) => Some(TableSampleMethod::Block), - _ => None, - }; - - let parenthesized = self.consume_token(&Token::LParen); - - let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) { - let selected_bucket = self.parse_number_value()?.value; - self.expect_keywords(&[Keyword::OUT, Keyword::OF])?; - let total = self.parse_number_value()?.value; - let on = if self.parse_keyword(Keyword::ON) { - Some(self.parse_expr()?) - } else { - None - }; - ( - None, - Some(TableSampleBucket { - bucket: selected_bucket, - total, - on, - }), - ) - } else { - let value = match self.maybe_parse(|p| p.parse_expr())? { - Some(num) => num, - None => { - let next_token = self.next_token(); - if let Token::Word(w) = next_token.token { - Expr::Value(Value::Placeholder(w.value).with_span(next_token.span)) - } else { - return parser_err!( - "Expecting number or byte length e.g. 100M", - self.peek_token().span.start - ); - } - } - }; - let unit = if self.parse_keyword(Keyword::ROWS) { - Some(TableSampleUnit::Rows) - } else if self.parse_keyword(Keyword::PERCENT) { - Some(TableSampleUnit::Percent) - } else { - None - }; - ( - Some(TableSampleQuantity { - parenthesized, - value, - unit, - }), - None, - ) - }; - if parenthesized { - self.expect_token(&Token::RParen)?; - } - - let seed = if self.parse_keyword(Keyword::REPEATABLE) { - Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?) - } else if self.parse_keyword(Keyword::SEED) { - Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?) - } else { - None - }; - - let offset = if self.parse_keyword(Keyword::OFFSET) { - Some(self.parse_expr()?) - } else { - None - }; - - Ok(Box::new(TableSample { - modifier, - name, - quantity, - seed, - bucket, - offset, - })) - } - - fn parse_table_sample_seed( - &mut self, - modifier: TableSampleSeedModifier, - ) -> Result { - self.expect_token(&Token::LParen)?; - let value = self.parse_number_value()?.value; - self.expect_token(&Token::RParen)?; - Ok(TableSampleSeed { modifier, value }) - } - - /// Parses `OPENJSON( jsonExpression [ , path ] ) [ ]` clause, - /// assuming the `OPENJSON` keyword was already consumed. 
- fn parse_open_json_table_factor(&mut self) -> Result { - self.expect_token(&Token::LParen)?; - let json_expr = self.parse_expr()?; - let json_path = if self.consume_token(&Token::Comma) { - Some(self.parse_value()?.value) - } else { - None - }; - self.expect_token(&Token::RParen)?; - let columns = if self.parse_keyword(Keyword::WITH) { - self.expect_token(&Token::LParen)?; - let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?; - self.expect_token(&Token::RParen)?; - columns - } else { - Vec::new() - }; - let alias = self.maybe_parse_table_alias()?; - Ok(TableFactor::OpenJsonTable { - json_expr, - json_path, - columns, - alias, - }) - } - - fn parse_xml_table_factor(&mut self) -> Result { - self.expect_token(&Token::LParen)?; - let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) { - self.expect_token(&Token::LParen)?; - let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?; - self.expect_token(&Token::RParen)?; - self.expect_token(&Token::Comma)?; - namespaces - } else { - vec![] - }; - let row_expression = self.parse_expr()?; - let passing = self.parse_xml_passing_clause()?; - self.expect_keyword_is(Keyword::COLUMNS)?; - let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?; - self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; - Ok(TableFactor::XmlTable { - namespaces, - row_expression, - passing, - columns, - alias, - }) - } - - fn parse_xml_namespace_definition(&mut self) -> Result { - let uri = self.parse_expr()?; - self.expect_keyword_is(Keyword::AS)?; - let name = self.parse_identifier()?; - Ok(XmlNamespaceDefinition { uri, name }) - } - - fn parse_xml_table_column(&mut self) -> Result { - let name = self.parse_identifier()?; - - let option = if self.parse_keyword(Keyword::FOR) { - self.expect_keyword(Keyword::ORDINALITY)?; - XmlTableColumnOption::ForOrdinality - } else { - let r#type = self.parse_data_type()?; - let mut path = None; - let mut default = None; - - if self.parse_keyword(Keyword::PATH) { - path = Some(self.parse_expr()?); - } - - if self.parse_keyword(Keyword::DEFAULT) { - default = Some(self.parse_expr()?); - } - - let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]); - if !not_null { - // NULL is the default but can be specified explicitly - let _ = self.parse_keyword(Keyword::NULL); - } - - XmlTableColumnOption::NamedInfo { - r#type, - path, - default, - nullable: !not_null, - } - }; - Ok(XmlTableColumn { name, option }) - } - - fn parse_xml_passing_clause(&mut self) -> Result { - let mut arguments = vec![]; - if self.parse_keyword(Keyword::PASSING) { - loop { - let by_value = - self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok(); - let expr = self.parse_expr()?; - let alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier()?) - } else { - None - }; - arguments.push(XmlPassingArgument { - expr, - alias, - by_value, - }); - if !self.consume_token(&Token::Comma) { - break; - } - } - } - Ok(XmlPassingClause { arguments }) - } - fn parse_match_recognize(&mut self, table: TableFactor) -> Result { self.expect_token(&Token::LParen)?; @@ -13354,7 +10240,7 @@ impl<'a> Parser<'a> { self.parse_comma_separated(|p| { let expr = p.parse_expr()?; let _ = p.parse_keyword(Keyword::AS); - let alias = p.parse_identifier()?; + let alias = p.parse_identifier(false)?; Ok(Measure { expr, alias }) })? 
} else { @@ -13400,9 +10286,9 @@ impl<'a> Parser<'a> { } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) { Some(AfterMatchSkip::ToNextRow) } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) { - Some(AfterMatchSkip::ToFirst(self.parse_identifier()?)) + Some(AfterMatchSkip::ToFirst(self.parse_identifier(false)?)) } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) { - Some(AfterMatchSkip::ToLast(self.parse_identifier()?)) + Some(AfterMatchSkip::ToLast(self.parse_identifier(false)?)) } else { let found = self.next_token(); return self.expected("after match skip option", found); @@ -13411,21 +10297,21 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword_is(Keyword::PATTERN)?; + self.expect_keyword(Keyword::PATTERN)?; let pattern = self.parse_parenthesized(Self::parse_pattern)?; - self.expect_keyword_is(Keyword::DEFINE)?; + self.expect_keyword(Keyword::DEFINE)?; let symbols = self.parse_comma_separated(|p| { - let symbol = p.parse_identifier()?; - p.expect_keyword_is(Keyword::AS)?; + let symbol = p.parse_identifier(false)?; + p.expect_keyword(Keyword::AS)?; let definition = p.parse_expr()?; Ok(SymbolDefinition { symbol, definition }) })?; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::MatchRecognize { table: Box::new(table), @@ -13448,7 +10334,9 @@ impl<'a> Parser<'a> { } Token::LBrace => { self.expect_token(&Token::Minus)?; - let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?; + let symbol = self + .parse_identifier(false) + .map(MatchRecognizeSymbol::Named)?; self.expect_token(&Token::Minus)?; self.expect_token(&Token::RBrace)?; Ok(MatchRecognizePattern::Exclude(symbol)) @@ -13460,7 +10348,7 @@ impl<'a> Parser<'a> { }) if value == "PERMUTE" => { self.expect_token(&Token::LParen)?; let symbols = self.parse_comma_separated(|p| { - p.parse_identifier().map(MatchRecognizeSymbol::Named) + p.parse_identifier(false).map(MatchRecognizeSymbol::Named) })?; self.expect_token(&Token::RParen)?; Ok(MatchRecognizePattern::Permute(symbols)) @@ -13472,7 +10360,7 @@ impl<'a> Parser<'a> { } _ => { self.prev_token(); - self.parse_identifier() + self.parse_identifier(false) .map(MatchRecognizeSymbol::Named) .map(MatchRecognizePattern::Symbol) } @@ -13497,7 +10385,7 @@ impl<'a> Parser<'a> { return self.expected("literal number", next_token); }; self.expect_token(&Token::RBrace)?; - RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?) + RepetitionQuantifier::AtMost(Self::parse(n, token.location)?) } Token::Number(n, _) if self.consume_token(&Token::Comma) => { let next_token = self.next_token(); @@ -13505,12 +10393,12 @@ impl<'a> Parser<'a> { Token::Number(m, _) => { self.expect_token(&Token::RBrace)?; RepetitionQuantifier::Range( - Self::parse(n, token.span.start)?, - Self::parse(m, token.span.start)?, + Self::parse(n, token.location)?, + Self::parse(m, token.location)?, ) } Token::RBrace => { - RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?) + RepetitionQuantifier::AtLeast(Self::parse(n, token.location)?) } _ => { return self.expected("} or upper bound", next_token); @@ -13519,7 +10407,7 @@ impl<'a> Parser<'a> { } Token::Number(n, _) => { self.expect_token(&Token::RBrace)?; - RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?) + RepetitionQuantifier::Exactly(Self::parse(n, token.location)?) 
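`parse_match_recognize` itself is kept on both sides of this change; only the `parse_identifier`/`expect_keyword` call signatures and the span-vs-location plumbing differ. A small sketch, with illustrative table and column names, of the kind of row-pattern statement that reaches this code path:

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // MEASURES, PATTERN, and DEFINE are parsed in that order inside MATCH_RECOGNIZE(...).
    let sql = "SELECT * FROM trades MATCH_RECOGNIZE(PARTITION BY symbol ORDER BY ts \
               MEASURES AVG(price) AS avg_price PATTERN (A B+) DEFINE B AS price > 10) AS m";
    match Parser::parse_sql(&SnowflakeDialect {}, sql) {
        Ok(ast) => println!("MATCH_RECOGNIZE AST: {ast:#?}"),
        Err(e) => println!("parse error: {e}"),
    }
}
```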
} _ => return self.expected("quantifier range", token), } @@ -13561,20 +10449,18 @@ impl<'a> Parser<'a> { } } - /// Parses a the timestamp version specifier (i.e. query historical data) - pub fn maybe_parse_table_version(&mut self) -> Result, ParserError> { - if self.dialect.supports_timestamp_versioning() { - if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) - { - let expr = self.parse_expr()?; - return Ok(Some(TableVersion::ForSystemTimeAsOf(expr))); - } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) { - let func_name = self.parse_object_name(true)?; - let func = self.parse_function(func_name)?; - return Ok(Some(TableVersion::Function(func))); - } + /// Parse a given table version specifier. + /// + /// For now it only supports timestamp versioning for BigQuery and MSSQL dialects. + pub fn parse_table_version(&mut self) -> Result, ParserError> { + if dialect_of!(self is BigQueryDialect | MsSqlDialect) + && self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) + { + let expr = self.parse_expr()?; + Ok(Some(TableVersion::ForSystemTimeAsOf(expr))) + } else { + Ok(None) } - Ok(None) } /// Parses MySQL's JSON_TABLE column definition. @@ -13582,8 +10468,8 @@ impl<'a> Parser<'a> { pub fn parse_json_table_column_def(&mut self) -> Result { if self.parse_keyword(Keyword::NESTED) { let _has_path_keyword = self.parse_keyword(Keyword::PATH); - let path = self.parse_value()?.value; - self.expect_keyword_is(Keyword::COLUMNS)?; + let path = self.parse_value()?; + self.expect_keyword(Keyword::COLUMNS)?; let columns = self.parse_parenthesized(|p| { p.parse_comma_separated(Self::parse_json_table_column_def) })?; @@ -13592,22 +10478,22 @@ impl<'a> Parser<'a> { columns, })); } - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; if self.parse_keyword(Keyword::FOR) { - self.expect_keyword_is(Keyword::ORDINALITY)?; + self.expect_keyword(Keyword::ORDINALITY)?; return Ok(JsonTableColumn::ForOrdinality(name)); } let r#type = self.parse_data_type()?; let exists = self.parse_keyword(Keyword::EXISTS); - self.expect_keyword_is(Keyword::PATH)?; - let path = self.parse_value()?.value; + self.expect_keyword(Keyword::PATH)?; + let path = self.parse_value()?; let mut on_empty = None; let mut on_error = None; while let Some(error_handling) = self.parse_json_table_column_error_handling()? { if self.parse_keyword(Keyword::EMPTY) { on_empty = Some(error_handling); } else { - self.expect_keyword_is(Keyword::ERROR)?; + self.expect_keyword(Keyword::ERROR)?; on_error = Some(error_handling); } } @@ -13621,34 +10507,6 @@ impl<'a> Parser<'a> { })) } - /// Parses MSSQL's `OPENJSON WITH` column definition. 
- /// - /// ```sql - /// colName type [ column_path ] [ AS JSON ] - /// ``` - /// - /// Reference: - pub fn parse_openjson_table_column_def(&mut self) -> Result { - let name = self.parse_identifier()?; - let r#type = self.parse_data_type()?; - let path = if let Token::SingleQuotedString(path) = self.peek_token().token { - self.next_token(); - Some(path) - } else { - None - }; - let as_json = self.parse_keyword(Keyword::AS); - if as_json { - self.expect_keyword_is(Keyword::JSON)?; - } - Ok(OpenJsonTableColumn { - name, - r#type, - path, - as_json, - }) - } - fn parse_json_table_column_error_handling( &mut self, ) -> Result, ParserError> { @@ -13657,11 +10515,11 @@ impl<'a> Parser<'a> { } else if self.parse_keyword(Keyword::ERROR) { JsonTableColumnErrorHandling::Error } else if self.parse_keyword(Keyword::DEFAULT) { - JsonTableColumnErrorHandling::Default(self.parse_value()?.value) + JsonTableColumnErrorHandling::Default(self.parse_value()?) } else { return Ok(None); }; - self.expect_keyword_is(Keyword::ON)?; + self.expect_keyword(Keyword::ON)?; Ok(Some(res)) } @@ -13671,7 +10529,7 @@ impl<'a> Parser<'a> { ) -> Result { let subquery = self.parse_query()?; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::Derived { lateral: match lateral { Lateral => true, @@ -13687,9 +10545,9 @@ impl<'a> Parser<'a> { Token::Word(w) => Ok(w.value), _ => self.expected("a function identifier", self.peek_token()), }?; - let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?; + let expr = self.parse_function(ObjectName(vec![Ident::new(function_name)]))?; let alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) } else { None }; @@ -13697,12 +10555,13 @@ impl<'a> Parser<'a> { Ok(ExprWithAlias { expr, alias }) } /// Parses an expression with an optional alias - /// + /// Examples: - /// + /// ```sql /// SUM(price) AS total_price /// ``` + /// ```sql /// SUM(price) /// ``` @@ -13718,10 +10577,11 @@ impl<'a> Parser<'a> { /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value)); /// # Ok(()) /// # } + pub fn parse_expr_with_alias(&mut self) -> Result { let expr = self.parse_expr()?; let alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) 
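Both sides of the change keep MySQL's `JSON_TABLE` column-definition parsing (`NESTED PATH`, `FOR ORDINALITY`, and the `ON EMPTY`/`ON ERROR` handlers); only the value and keyword helper calls differ. A short sketch under the MySQL dialect, with an illustrative JSON document and paths:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each column definition carries a type, a PATH, and optional ON EMPTY / ON ERROR handling.
    let sql = "SELECT * FROM JSON_TABLE('[{\"a\": 1}]', '$[*]' \
               COLUMNS(a INT PATH '$.a' DEFAULT '0' ON ERROR)) AS jt";
    match Parser::parse_sql(&MySqlDialect {}, sql) {
        Ok(ast) => println!("JSON_TABLE AST: {ast:#?}"),
        Err(e) => println!("parse error: {e}"),
    }
}
```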
} else { None }; @@ -13735,9 +10595,9 @@ impl<'a> Parser<'a> { ) -> Result { self.expect_token(&Token::LParen)?; let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?; - self.expect_keyword_is(Keyword::FOR)?; - let value_column = self.parse_period_separated(|p| p.parse_identifier())?; - self.expect_keyword_is(Keyword::IN)?; + self.expect_keyword(Keyword::FOR)?; + let value_column = self.parse_object_name(false)?.0; + self.expect_keyword(Keyword::IN)?; self.expect_token(&Token::LParen)?; let value_source = if self.parse_keyword(Keyword::ANY) { @@ -13765,7 +10625,7 @@ impl<'a> Parser<'a> { }; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::Pivot { table: Box::new(table), aggregate_functions, @@ -13780,27 +10640,17 @@ impl<'a> Parser<'a> { &mut self, table: TableFactor, ) -> Result { - let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) { - self.expect_keyword_is(Keyword::NULLS)?; - Some(NullInclusion::IncludeNulls) - } else if self.parse_keyword(Keyword::EXCLUDE) { - self.expect_keyword_is(Keyword::NULLS)?; - Some(NullInclusion::ExcludeNulls) - } else { - None - }; self.expect_token(&Token::LParen)?; - let value = self.parse_identifier()?; - self.expect_keyword_is(Keyword::FOR)?; - let name = self.parse_identifier()?; - self.expect_keyword_is(Keyword::IN)?; + let value = self.parse_identifier(false)?; + self.expect_keyword(Keyword::FOR)?; + let name = self.parse_identifier(false)?; + self.expect_keyword(Keyword::IN)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?; self.expect_token(&Token::RParen)?; - let alias = self.maybe_parse_table_alias()?; + let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; Ok(TableFactor::Unpivot { table: Box::new(table), value, - null_inclusion, name, columns, alias, @@ -13814,7 +10664,7 @@ impl<'a> Parser<'a> { let constraint = self.parse_expr()?; Ok(JoinConstraint::On(constraint)) } else if self.parse_keyword(Keyword::USING) { - let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; + let columns = self.parse_parenthesized_column_list(Mandatory, false)?; Ok(JoinConstraint::Using(columns)) } else { Ok(JoinConstraint::None) @@ -13824,625 +10674,152 @@ impl<'a> Parser<'a> { /// Parse a GRANT statement. pub fn parse_grant(&mut self) -> Result { - let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; + let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?; - self.expect_keyword_is(Keyword::TO)?; - let grantees = self.parse_grantees()?; + self.expect_keyword(Keyword::TO)?; + let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?; let with_grant_option = self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]); - let current_grants = - if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) { - Some(CurrentGrantsKind::CopyCurrentGrants) - } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) { - Some(CurrentGrantsKind::RevokeCurrentGrants) - } else { - None - }; - - let as_grantor = if self.parse_keywords(&[Keyword::AS]) { - Some(self.parse_identifier()?) - } else { - None - }; - - let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) { - Some(self.parse_identifier()?) 
- } else { - None - }; + let granted_by = self + .parse_keywords(&[Keyword::GRANTED, Keyword::BY]) + .then(|| self.parse_identifier(false).unwrap()); Ok(Statement::Grant { privileges, objects, grantees, with_grant_option, - as_grantor, granted_by, - current_grants, }) } - fn parse_grantees(&mut self) -> Result, ParserError> { - let mut values = vec![]; - let mut grantee_type = GranteesType::None; - loop { - let new_grantee_type = if self.parse_keyword(Keyword::ROLE) { - GranteesType::Role - } else if self.parse_keyword(Keyword::USER) { - GranteesType::User - } else if self.parse_keyword(Keyword::SHARE) { - GranteesType::Share - } else if self.parse_keyword(Keyword::GROUP) { - GranteesType::Group - } else if self.parse_keyword(Keyword::PUBLIC) { - GranteesType::Public - } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) { - GranteesType::DatabaseRole - } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) { - GranteesType::ApplicationRole - } else if self.parse_keyword(Keyword::APPLICATION) { - GranteesType::Application - } else { - grantee_type.clone() // keep from previous iteraton, if not specified - }; - - if self - .dialect - .get_reserved_grantees_types() - .contains(&new_grantee_type) - { - self.prev_token(); - } else { - grantee_type = new_grantee_type; - } - - let grantee = if grantee_type == GranteesType::Public { - Grantee { - grantee_type: grantee_type.clone(), - name: None, - } - } else { - let mut name = self.parse_grantee_name()?; - if self.consume_token(&Token::Colon) { - // Redshift supports namespace prefix for external users and groups: - // : or : - // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html - let ident = self.parse_identifier()?; - if let GranteeName::ObjectName(namespace) = name { - name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new( - format!("{namespace}:{ident}"), - )])); - }; - } - Grantee { - grantee_type: grantee_type.clone(), - name: Some(name), - } - }; - - values.push(grantee); - - if !self.consume_token(&Token::Comma) { - break; - } - } - - Ok(values) - } - - pub fn parse_grant_deny_revoke_privileges_objects( + pub fn parse_grant_revoke_privileges_objects( &mut self, - ) -> Result<(Privileges, Option), ParserError> { + ) -> Result<(Privileges, GrantObjects), ParserError> { let privileges = if self.parse_keyword(Keyword::ALL) { Privileges::All { with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES), } } else { - let actions = self.parse_actions_list()?; - Privileges::Actions(actions) + let (actions, err): (Vec<_>, Vec<_>) = self + .parse_actions_list()? 
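`parse_grant` on the newer side adds typed grantees, `COPY`/`REVOKE CURRENT GRANTS`, and an `AS <grantor>` clause, while the 0.52.0 side reduces grantees to plain identifiers. A grant that both sides accept, sketched with illustrative role and schema names:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Privileges, the ON target, grantees, and WITH GRANT OPTION are parsed in sequence.
    let sql = "GRANT SELECT, INSERT ON ALL TABLES IN SCHEMA public \
               TO reporting, analytics WITH GRANT OPTION";
    match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
        Ok(ast) => println!("GRANT AST: {ast:#?}"),
        Err(e) => println!("parse error: {e}"),
    }
}
```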
+ .into_iter() + .map(|(kw, columns)| match kw { + Keyword::DELETE => Ok(Action::Delete), + Keyword::INSERT => Ok(Action::Insert { columns }), + Keyword::REFERENCES => Ok(Action::References { columns }), + Keyword::SELECT => Ok(Action::Select { columns }), + Keyword::TRIGGER => Ok(Action::Trigger), + Keyword::TRUNCATE => Ok(Action::Truncate), + Keyword::UPDATE => Ok(Action::Update { columns }), + Keyword::USAGE => Ok(Action::Usage), + Keyword::CONNECT => Ok(Action::Connect), + Keyword::CREATE => Ok(Action::Create), + Keyword::EXECUTE => Ok(Action::Execute), + Keyword::TEMPORARY => Ok(Action::Temporary), + // This will cover all future added keywords to + // parse_grant_permission and unhandled in this + // match + _ => Err(kw), + }) + .partition(Result::is_ok); + + if !err.is_empty() { + let errors: Vec = err.into_iter().filter_map(|x| x.err()).collect(); + return Err(ParserError::ParserError(format!( + "INTERNAL ERROR: GRANT/REVOKE unexpected keyword(s) - {errors:?}" + ))); + } + let act = actions.into_iter().filter_map(|x| x.ok()).collect(); + Privileges::Actions(act) }; - let objects = if self.parse_keyword(Keyword::ON) { - if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) { - Some(GrantObjects::AllTablesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::ALL, - Keyword::EXTERNAL, - Keyword::TABLES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::AllExternalTablesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::ALL, - Keyword::VIEWS, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::AllViewsInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::ALL, - Keyword::MATERIALIZED, - Keyword::VIEWS, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::AllMaterializedViewsInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::SCHEMAS, - Keyword::IN, - Keyword::DATABASE, - ]) { - Some(GrantObjects::FutureSchemasInDatabase { - databases: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::TABLES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::FutureTablesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::EXTERNAL, - Keyword::TABLES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::FutureExternalTablesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::VIEWS, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::FutureViewsInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::MATERIALIZED, - Keyword::VIEWS, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::FutureMaterializedViewsInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::ALL, - Keyword::SEQUENCES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::AllSequencesInSchema { - schemas: self.parse_comma_separated(|p| 
p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[ - Keyword::FUTURE, - Keyword::SEQUENCES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - Some(GrantObjects::FutureSequencesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - }) - } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) { - Some(GrantObjects::ResourceMonitors( - self.parse_comma_separated(|p| p.parse_object_name(false))?, - )) - } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) { - Some(GrantObjects::ComputePools( - self.parse_comma_separated(|p| p.parse_object_name(false))?, - )) - } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) { - Some(GrantObjects::FailoverGroup( - self.parse_comma_separated(|p| p.parse_object_name(false))?, - )) - } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) { - Some(GrantObjects::ReplicationGroup( - self.parse_comma_separated(|p| p.parse_object_name(false))?, - )) - } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) { - Some(GrantObjects::ExternalVolumes( - self.parse_comma_separated(|p| p.parse_object_name(false))?, - )) - } else { - let object_type = self.parse_one_of_keywords(&[ - Keyword::SEQUENCE, - Keyword::DATABASE, - Keyword::SCHEMA, - Keyword::TABLE, - Keyword::VIEW, - Keyword::WAREHOUSE, - Keyword::INTEGRATION, - Keyword::VIEW, - Keyword::WAREHOUSE, - Keyword::INTEGRATION, - Keyword::USER, - Keyword::CONNECTION, - Keyword::PROCEDURE, - Keyword::FUNCTION, - ]); - let objects = - self.parse_comma_separated(|p| p.parse_object_name_inner(false, true)); - match object_type { - Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)), - Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)), - Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)), - Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)), - Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)), - Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)), - Some(Keyword::USER) => Some(GrantObjects::Users(objects?)), - Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)), - kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => { - if let Some(name) = objects?.first() { - self.parse_grant_procedure_or_function(name, &kw)? - } else { - self.expected("procedure or function name", self.peek_token())? 
- } - } - Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)), - _ => unreachable!(), - } + self.expect_keyword(Keyword::ON)?; + + let objects = if self.parse_keywords(&[ + Keyword::ALL, + Keyword::TABLES, + Keyword::IN, + Keyword::SCHEMA, + ]) { + GrantObjects::AllTablesInSchema { + schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, + } + } else if self.parse_keywords(&[ + Keyword::ALL, + Keyword::SEQUENCES, + Keyword::IN, + Keyword::SCHEMA, + ]) { + GrantObjects::AllSequencesInSchema { + schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, } } else { - None + let object_type = + self.parse_one_of_keywords(&[Keyword::SEQUENCE, Keyword::SCHEMA, Keyword::TABLE]); + let objects = self.parse_comma_separated(|p| p.parse_object_name(false)); + match object_type { + Some(Keyword::SCHEMA) => GrantObjects::Schemas(objects?), + Some(Keyword::SEQUENCE) => GrantObjects::Sequences(objects?), + Some(Keyword::TABLE) | None => GrantObjects::Tables(objects?), + _ => unreachable!(), + } }; Ok((privileges, objects)) } - fn parse_grant_procedure_or_function( - &mut self, - name: &ObjectName, - kw: &Option, - ) -> Result, ParserError> { - let arg_types = if self.consume_token(&Token::LParen) { - let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?; - self.expect_token(&Token::RParen)?; - list - } else { - vec![] - }; - match kw { - Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure { - name: name.clone(), - arg_types, - })), - Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function { - name: name.clone(), - arg_types, - })), - _ => self.expected("procedure or function keywords", self.peek_token())?, - } - } - - pub fn parse_grant_permission(&mut self) -> Result { - fn parse_columns(parser: &mut Parser) -> Result>, ParserError> { - let columns = parser.parse_parenthesized_column_list(Optional, false)?; - if columns.is_empty() { - Ok(None) - } else { - Ok(Some(columns)) - } - } - - // Multi-word privileges - if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) { - Ok(Action::ImportedPrivileges) - } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) { - Ok(Action::AddSearchOptimization) - } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) { - Ok(Action::AttachListing) - } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) { - Ok(Action::AttachPolicy) - } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) { - Ok(Action::BindServiceEndpoint) - } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) { - let role = self.parse_object_name(false)?; - Ok(Action::DatabaseRole { role }) - } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) { - Ok(Action::EvolveSchema) - } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) { - Ok(Action::ImportShare) - } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) { - Ok(Action::ManageVersions) - } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) { - Ok(Action::ManageReleases) - } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) { - Ok(Action::OverrideShareRestrictions) - } else if self.parse_keywords(&[ - Keyword::PURCHASE, - Keyword::DATA, - Keyword::EXCHANGE, - Keyword::LISTING, + pub fn parse_grant_permission(&mut self) -> Result { + if let Some(kw) = self.parse_one_of_keywords(&[ + Keyword::CONNECT, + Keyword::CREATE, + Keyword::DELETE, + Keyword::EXECUTE, + 
Keyword::INSERT, + Keyword::REFERENCES, + Keyword::SELECT, + Keyword::TEMPORARY, + Keyword::TRIGGER, + Keyword::TRUNCATE, + Keyword::UPDATE, + Keyword::USAGE, ]) { - Ok(Action::PurchaseDataExchangeListing) - } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) { - Ok(Action::ResolveAll) - } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) { - Ok(Action::ReadSession) - - // Single-word privileges - } else if self.parse_keyword(Keyword::APPLY) { - let apply_type = self.parse_action_apply_type()?; - Ok(Action::Apply { apply_type }) - } else if self.parse_keyword(Keyword::APPLYBUDGET) { - Ok(Action::ApplyBudget) - } else if self.parse_keyword(Keyword::AUDIT) { - Ok(Action::Audit) - } else if self.parse_keyword(Keyword::CONNECT) { - Ok(Action::Connect) - } else if self.parse_keyword(Keyword::CREATE) { - let obj_type = self.maybe_parse_action_create_object_type(); - Ok(Action::Create { obj_type }) - } else if self.parse_keyword(Keyword::DELETE) { - Ok(Action::Delete) - } else if self.parse_keyword(Keyword::EXEC) { - let obj_type = self.maybe_parse_action_execute_obj_type(); - Ok(Action::Exec { obj_type }) - } else if self.parse_keyword(Keyword::EXECUTE) { - let obj_type = self.maybe_parse_action_execute_obj_type(); - Ok(Action::Execute { obj_type }) - } else if self.parse_keyword(Keyword::FAILOVER) { - Ok(Action::Failover) - } else if self.parse_keyword(Keyword::INSERT) { - Ok(Action::Insert { - columns: parse_columns(self)?, - }) - } else if self.parse_keyword(Keyword::MANAGE) { - let manage_type = self.parse_action_manage_type()?; - Ok(Action::Manage { manage_type }) - } else if self.parse_keyword(Keyword::MODIFY) { - let modify_type = self.parse_action_modify_type(); - Ok(Action::Modify { modify_type }) - } else if self.parse_keyword(Keyword::MONITOR) { - let monitor_type = self.parse_action_monitor_type(); - Ok(Action::Monitor { monitor_type }) - } else if self.parse_keyword(Keyword::OPERATE) { - Ok(Action::Operate) - } else if self.parse_keyword(Keyword::REFERENCES) { - Ok(Action::References { - columns: parse_columns(self)?, - }) - } else if self.parse_keyword(Keyword::READ) { - Ok(Action::Read) - } else if self.parse_keyword(Keyword::REPLICATE) { - Ok(Action::Replicate) - } else if self.parse_keyword(Keyword::ROLE) { - let role = self.parse_identifier()?; - Ok(Action::Role { role }) - } else if self.parse_keyword(Keyword::SELECT) { - Ok(Action::Select { - columns: parse_columns(self)?, - }) - } else if self.parse_keyword(Keyword::TEMPORARY) { - Ok(Action::Temporary) - } else if self.parse_keyword(Keyword::TRIGGER) { - Ok(Action::Trigger) - } else if self.parse_keyword(Keyword::TRUNCATE) { - Ok(Action::Truncate) - } else if self.parse_keyword(Keyword::UPDATE) { - Ok(Action::Update { - columns: parse_columns(self)?, - }) - } else if self.parse_keyword(Keyword::USAGE) { - Ok(Action::Usage) - } else if self.parse_keyword(Keyword::OWNERSHIP) { - Ok(Action::Ownership) + let columns = match kw { + Keyword::INSERT | Keyword::REFERENCES | Keyword::SELECT | Keyword::UPDATE => { + let columns = self.parse_parenthesized_column_list(Optional, false)?; + if columns.is_empty() { + None + } else { + Some(columns) + } + } + _ => None, + }; + Ok((kw, columns)) } else { self.expected("a privilege keyword", self.peek_token())? 
} } - fn maybe_parse_action_create_object_type(&mut self) -> Option { - // Multi-word object types - if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) { - Some(ActionCreateObjectType::ApplicationPackage) - } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) { - Some(ActionCreateObjectType::ComputePool) - } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) { - Some(ActionCreateObjectType::DataExchangeListing) - } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) { - Some(ActionCreateObjectType::ExternalVolume) - } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) { - Some(ActionCreateObjectType::FailoverGroup) - } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) { - Some(ActionCreateObjectType::NetworkPolicy) - } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) { - Some(ActionCreateObjectType::OrganiationListing) - } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) { - Some(ActionCreateObjectType::ReplicationGroup) - } - // Single-word object types - else if self.parse_keyword(Keyword::ACCOUNT) { - Some(ActionCreateObjectType::Account) - } else if self.parse_keyword(Keyword::APPLICATION) { - Some(ActionCreateObjectType::Application) - } else if self.parse_keyword(Keyword::DATABASE) { - Some(ActionCreateObjectType::Database) - } else if self.parse_keyword(Keyword::INTEGRATION) { - Some(ActionCreateObjectType::Integration) - } else if self.parse_keyword(Keyword::ROLE) { - Some(ActionCreateObjectType::Role) - } else if self.parse_keyword(Keyword::SHARE) { - Some(ActionCreateObjectType::Share) - } else if self.parse_keyword(Keyword::USER) { - Some(ActionCreateObjectType::User) - } else if self.parse_keyword(Keyword::WAREHOUSE) { - Some(ActionCreateObjectType::Warehouse) - } else { - None - } - } - - fn parse_action_apply_type(&mut self) -> Result { - if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) { - Ok(ActionApplyType::AggregationPolicy) - } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) { - Ok(ActionApplyType::AuthenticationPolicy) - } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) { - Ok(ActionApplyType::JoinPolicy) - } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) { - Ok(ActionApplyType::MaskingPolicy) - } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) { - Ok(ActionApplyType::PackagesPolicy) - } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) { - Ok(ActionApplyType::PasswordPolicy) - } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) { - Ok(ActionApplyType::ProjectionPolicy) - } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) { - Ok(ActionApplyType::RowAccessPolicy) - } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) { - Ok(ActionApplyType::SessionPolicy) - } else if self.parse_keyword(Keyword::TAG) { - Ok(ActionApplyType::Tag) - } else { - self.expected("GRANT APPLY type", self.peek_token()) - } - } - - fn maybe_parse_action_execute_obj_type(&mut self) -> Option { - if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) { - Some(ActionExecuteObjectType::DataMetricFunction) - } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) { - Some(ActionExecuteObjectType::ManagedAlert) - } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) { - Some(ActionExecuteObjectType::ManagedTask) - } else if 
self.parse_keyword(Keyword::ALERT) { - Some(ActionExecuteObjectType::Alert) - } else if self.parse_keyword(Keyword::TASK) { - Some(ActionExecuteObjectType::Task) - } else { - None - } - } - - fn parse_action_manage_type(&mut self) -> Result { - if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) { - Ok(ActionManageType::AccountSupportCases) - } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) { - Ok(ActionManageType::EventSharing) - } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) { - Ok(ActionManageType::ListingAutoFulfillment) - } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) { - Ok(ActionManageType::OrganizationSupportCases) - } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) { - Ok(ActionManageType::UserSupportCases) - } else if self.parse_keyword(Keyword::GRANTS) { - Ok(ActionManageType::Grants) - } else if self.parse_keyword(Keyword::WAREHOUSES) { - Ok(ActionManageType::Warehouses) - } else { - self.expected("GRANT MANAGE type", self.peek_token()) - } - } - - fn parse_action_modify_type(&mut self) -> Option { - if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) { - Some(ActionModifyType::LogLevel) - } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) { - Some(ActionModifyType::TraceLevel) - } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) { - Some(ActionModifyType::SessionLogLevel) - } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) { - Some(ActionModifyType::SessionTraceLevel) - } else { - None - } - } - - fn parse_action_monitor_type(&mut self) -> Option { - if self.parse_keyword(Keyword::EXECUTION) { - Some(ActionMonitorType::Execution) - } else if self.parse_keyword(Keyword::SECURITY) { - Some(ActionMonitorType::Security) - } else if self.parse_keyword(Keyword::USAGE) { - Some(ActionMonitorType::Usage) - } else { - None - } - } - - pub fn parse_grantee_name(&mut self) -> Result { - let mut name = self.parse_object_name(false)?; - if self.dialect.supports_user_host_grantee() - && name.0.len() == 1 - && name.0[0].as_ident().is_some() - && self.consume_token(&Token::AtSign) - { - let user = name.0.pop().unwrap().as_ident().unwrap().clone(); - let host = self.parse_identifier()?; - Ok(GranteeName::UserHost { user, host }) - } else { - Ok(GranteeName::ObjectName(name)) - } - } - - /// Parse [`Statement::Deny`] - pub fn parse_deny(&mut self) -> Result { - self.expect_keyword(Keyword::DENY)?; - - let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; - let objects = match objects { - Some(o) => o, - None => { - return parser_err!( - "DENY statements must specify an object", - self.peek_token().span.start - ) - } - }; - - self.expect_keyword_is(Keyword::TO)?; - let grantees = self.parse_grantees()?; - let cascade = self.parse_cascade_option(); - let granted_by = if self.parse_keywords(&[Keyword::AS]) { - Some(self.parse_identifier()?) 
- } else { - None - }; - - Ok(Statement::Deny(DenyStatement { - privileges, - objects, - grantees, - cascade, - granted_by, - })) - } - /// Parse a REVOKE statement pub fn parse_revoke(&mut self) -> Result { - let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?; + let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?; - self.expect_keyword_is(Keyword::FROM)?; - let grantees = self.parse_grantees()?; + self.expect_keyword(Keyword::FROM)?; + let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?; - let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) { - Some(self.parse_identifier()?) - } else { - None - }; + let granted_by = self + .parse_keywords(&[Keyword::GRANTED, Keyword::BY]) + .then(|| self.parse_identifier(false).unwrap()); - let cascade = self.parse_cascade_option(); + let loc = self.peek_token().location; + let cascade = self.parse_keyword(Keyword::CASCADE); + let restrict = self.parse_keyword(Keyword::RESTRICT); + if cascade && restrict { + return parser_err!("Cannot specify both CASCADE and RESTRICT in REVOKE", loc); + } Ok(Statement::Revoke { privileges, @@ -14456,10 +10833,7 @@ impl<'a> Parser<'a> { /// Parse an REPLACE statement pub fn parse_replace(&mut self) -> Result { if !dialect_of!(self is MySqlDialect | GenericDialect) { - return parser_err!( - "Unsupported statement REPLACE", - self.peek_token().span.start - ); + return parser_err!("Unsupported statement REPLACE", self.peek_token().location); } let mut insert = self.parse_insert()?; @@ -14479,7 +10853,24 @@ impl<'a> Parser<'a> { /// Parse an INSERT statement pub fn parse_insert(&mut self) -> Result { - let or = self.parse_conflict_clause(); + let or = if !dialect_of!(self is SQLiteDialect) { + None + } else if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { + Some(SqliteOnConflict::Replace) + } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { + Some(SqliteOnConflict::Rollback) + } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) { + Some(SqliteOnConflict::Abort) + } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) { + Some(SqliteOnConflict::Fail) + } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) { + Some(SqliteOnConflict::Ignore) + } else if self.parse_keyword(Keyword::REPLACE) { + Some(SqliteOnConflict::Replace) + } else { + None + }; + let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) { None } else if self.parse_keyword(Keyword::LOW_PRIORITY) { @@ -14497,8 +10888,9 @@ impl<'a> Parser<'a> { let replace_into = false; - let overwrite = self.parse_keyword(Keyword::OVERWRITE); - let into = self.parse_keyword(Keyword::INTO); + let action = self.parse_one_of_keywords(&[Keyword::INTO, Keyword::OVERWRITE]); + let into = action == Some(Keyword::INTO); + let overwrite = action == Some(Keyword::OVERWRITE); let local = self.parse_keyword(Keyword::LOCAL); @@ -14520,23 +10912,21 @@ impl<'a> Parser<'a> { } else { // Hive lets you put table here regardless let table = self.parse_keyword(Keyword::TABLE); - let table_object = self.parse_table_object()?; + let table_name = self.parse_object_name(false)?; let table_alias = if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier()?) + Some(self.parse_identifier(false)?) 
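The same privilege/object parsing backs `parse_revoke`, and the SQLite-style `INSERT OR ...` conflict clause is consumed before the rest of the INSERT on both sides. A sketch with illustrative object names:

```rust
use sqlparser::dialect::{PostgreSqlDialect, SQLiteDialect};
use sqlparser::parser::Parser;

fn main() {
    // REVOKE ... FROM ... CASCADE as handled by parse_revoke.
    let revoke = "REVOKE SELECT ON customers FROM reporting CASCADE";
    println!("{:?}", Parser::parse_sql(&PostgreSqlDialect {}, revoke));

    // The SQLite conflict clause (OR IGNORE, OR REPLACE, ...) precedes the column list.
    let upsert = "INSERT OR IGNORE INTO t (a) VALUES (1)";
    println!("{:?}", Parser::parse_sql(&SQLiteDialect {}, upsert));
}
```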
} else { None }; let is_mysql = dialect_of!(self is MySqlDialect); - let (columns, partitioned, after_columns, source, assignments) = if self - .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) - { - (vec![], None, vec![], None, vec![]) - } else { - let (columns, partitioned, after_columns) = if !self.peek_subquery_start() { + let (columns, partitioned, after_columns, source) = + if self.parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) { + (vec![], None, vec![], None) + } else { let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?; let partitioned = self.parse_insert_partition()?; @@ -14546,40 +10936,12 @@ impl<'a> Parser<'a> { } else { vec![] }; - (columns, partitioned, after_columns) - } else { - Default::default() + + let source = Some(self.parse_query()?); + + (columns, partitioned, after_columns, source) }; - let (source, assignments) = if self.peek_keyword(Keyword::FORMAT) - || self.peek_keyword(Keyword::SETTINGS) - { - (None, vec![]) - } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) { - (None, self.parse_comma_separated(Parser::parse_assignment)?) - } else { - (Some(self.parse_query()?), vec![]) - }; - - (columns, partitioned, after_columns, source, assignments) - }; - - let (format_clause, settings) = if self.dialect.supports_insert_format() { - // Settings always comes before `FORMAT` for ClickHouse: - // - let settings = self.parse_settings()?; - - let format = if self.parse_keyword(Keyword::FORMAT) { - Some(self.parse_input_format_clause()?) - } else { - None - }; - - (format, settings) - } else { - Default::default() - }; - let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect) && self.parse_keyword(Keyword::AS) { @@ -14606,12 +10968,12 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword_is(Keyword::DO)?; + self.expect_keyword(Keyword::DO)?; let action = if self.parse_keyword(Keyword::NOTHING) { OnConflictAction::DoNothing } else { - self.expect_keyword_is(Keyword::UPDATE)?; - self.expect_keyword_is(Keyword::SET)?; + self.expect_keyword(Keyword::UPDATE)?; + self.expect_keyword(Keyword::SET)?; let assignments = self.parse_comma_separated(Parser::parse_assignment)?; let selection = if self.parse_keyword(Keyword::WHERE) { Some(self.parse_expr()?) @@ -14629,9 +10991,9 @@ impl<'a> Parser<'a> { action, })) } else { - self.expect_keyword_is(Keyword::DUPLICATE)?; - self.expect_keyword_is(Keyword::KEY)?; - self.expect_keyword_is(Keyword::UPDATE)?; + self.expect_keyword(Keyword::DUPLICATE)?; + self.expect_keyword(Keyword::KEY)?; + self.expect_keyword(Keyword::UPDATE)?; let l = self.parse_comma_separated(Parser::parse_assignment)?; Some(OnInsert::DuplicateKeyUpdate(l)) @@ -14648,7 +11010,7 @@ impl<'a> Parser<'a> { Ok(Statement::Insert(Insert { or, - table: table_object, + table_name, table_alias, ignore, into, @@ -14657,57 +11019,16 @@ impl<'a> Parser<'a> { columns, after_columns, source, - assignments, - has_table_keyword: table, + table, on, returning, replace_into, priority, insert_alias, - settings, - format_clause, })) } } - // Parses input format clause used for [ClickHouse]. - // - // - pub fn parse_input_format_clause(&mut self) -> Result { - let ident = self.parse_identifier()?; - let values = self - .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))? - .unwrap_or_default(); - - Ok(InputFormatClause { ident, values }) - } - - /// Returns true if the immediate tokens look like the - /// beginning of a subquery. 
`(SELECT ...` - fn peek_subquery_start(&mut self) -> bool { - let [maybe_lparen, maybe_select] = self.peek_tokens(); - Token::LParen == maybe_lparen - && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT) - } - - fn parse_conflict_clause(&mut self) -> Option { - if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { - Some(SqliteOnConflict::Replace) - } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { - Some(SqliteOnConflict::Rollback) - } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) { - Some(SqliteOnConflict::Abort) - } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) { - Some(SqliteOnConflict::Fail) - } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) { - Some(SqliteOnConflict::Ignore) - } else if self.parse_keyword(Keyword::REPLACE) { - Some(SqliteOnConflict::Replace) - } else { - None - } - } - pub fn parse_insert_partition(&mut self) -> Result>, ParserError> { if self.parse_keyword(Keyword::PARTITION) { self.expect_token(&Token::LParen)?; @@ -14719,22 +11040,6 @@ impl<'a> Parser<'a> { } } - pub fn parse_load_data_table_format( - &mut self, - ) -> Result, ParserError> { - if self.parse_keyword(Keyword::INPUTFORMAT) { - let input_format = self.parse_expr()?; - self.expect_keyword_is(Keyword::SERDE)?; - let serde = self.parse_expr()?; - Ok(Some(HiveLoadDataFormat { - input_format, - serde, - })) - } else { - Ok(None) - } - } - /// Parse an UPDATE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds @@ -14743,23 +11048,15 @@ impl<'a> Parser<'a> { } pub fn parse_update(&mut self) -> Result { - let or = self.parse_conflict_clause(); let table = self.parse_table_and_joins()?; - let from_before_set = if self.parse_keyword(Keyword::FROM) { - Some(UpdateTableFromKind::BeforeSet( - self.parse_table_with_joins()?, - )) - } else { - None - }; self.expect_keyword(Keyword::SET)?; let assignments = self.parse_comma_separated(Parser::parse_assignment)?; - let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) { - Some(UpdateTableFromKind::AfterSet( - self.parse_table_with_joins()?, - )) + let from = if self.parse_keyword(Keyword::FROM) + && dialect_of!(self is GenericDialect | PostgreSqlDialect | DuckDbDialect | BigQueryDialect | SnowflakeDialect | RedshiftSqlDialect | MsSqlDialect | SQLiteDialect ) + { + Some(self.parse_table_and_joins()?) } else { - from_before_set + None }; let selection = if self.parse_keyword(Keyword::WHERE) { Some(self.parse_expr()?) @@ -14777,7 +11074,6 @@ impl<'a> Parser<'a> { from, selection, returning, - or, }) } @@ -14802,61 +11098,45 @@ impl<'a> Parser<'a> { } pub fn parse_function_args(&mut self) -> Result { - let arg = if self.dialect.supports_named_fn_args_with_expr_name() { - self.maybe_parse(|p| { - let name = p.parse_expr()?; - let operator = p.parse_function_named_arg_operator()?; - let arg = p.parse_wildcard_expr()?.into(); - Ok(FunctionArg::ExprNamed { - name, - arg, - operator, - }) - })? - } else { - self.maybe_parse(|p| { - let name = p.parse_identifier()?; - let operator = p.parse_function_named_arg_operator()?; - let arg = p.parse_wildcard_expr()?.into(); - Ok(FunctionArg::Named { - name, - arg, - operator, - }) - })? 
- }; - if let Some(arg) = arg { - return Ok(arg); - } - Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) - } + if self.peek_nth_token(1) == Token::RArrow { + let name = self.parse_identifier(false)?; - fn parse_function_named_arg_operator(&mut self) -> Result { - if self.parse_keyword(Keyword::VALUE) { - return Ok(FunctionArgOperator::Value); - } - let tok = self.next_token(); - match tok.token { - Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => { - Ok(FunctionArgOperator::RightArrow) - } - Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => { - Ok(FunctionArgOperator::Equals) - } - Token::Assignment - if self - .dialect - .supports_named_fn_args_with_assignment_operator() => - { - Ok(FunctionArgOperator::Assignment) - } - Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => { - Ok(FunctionArgOperator::Colon) - } - _ => { - self.prev_token(); - self.expected("argument operator", tok) - } + self.expect_token(&Token::RArrow)?; + let arg = self.parse_wildcard_expr()?.into(); + + Ok(FunctionArg::Named { + name, + arg, + operator: FunctionArgOperator::RightArrow, + }) + } else if self.dialect.supports_named_fn_args_with_eq_operator() + && self.peek_nth_token(1) == Token::Eq + { + let name = self.parse_identifier(false)?; + + self.expect_token(&Token::Eq)?; + let arg = self.parse_wildcard_expr()?.into(); + + Ok(FunctionArg::Named { + name, + arg, + operator: FunctionArgOperator::Equals, + }) + } else if dialect_of!(self is DuckDbDialect | GenericDialect) + && self.peek_nth_token(1) == Token::Assignment + { + let name = self.parse_identifier(false)?; + + self.expect_token(&Token::Assignment)?; + let arg = self.parse_expr()?.into(); + + Ok(FunctionArg::Named { + name, + arg, + operator: FunctionArgOperator::Assignment, + }) + } else { + Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) } } @@ -14900,24 +11180,19 @@ impl<'a> Parser<'a> { /// FIRST_VALUE(x IGNORE NULL); /// ``` fn parse_function_argument_list(&mut self) -> Result { - let mut clauses = vec![]; - - // For MSSQL empty argument list with json-null-clause case, e.g. `JSON_ARRAY(NULL ON NULL)` - if let Some(null_clause) = self.parse_json_null_clause() { - clauses.push(FunctionArgumentClause::JsonNullClause(null_clause)); - } - if self.consume_token(&Token::RParen) { return Ok(FunctionArgumentList { duplicate_treatment: None, args: vec![], - clauses, + clauses: vec![], }); } let duplicate_treatment = self.parse_duplicate_treatment()?; let args = self.parse_comma_separated(Parser::parse_function_args)?; + let mut clauses = vec![]; + if self.dialect.supports_window_function_null_treatment_arg() { if let Some(null_treatment) = self.parse_null_treatment()? { clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment)); @@ -14951,17 +11226,13 @@ impl<'a> Parser<'a> { if dialect_of!(self is GenericDialect | MySqlDialect) && self.parse_keyword(Keyword::SEPARATOR) { - clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value)); + clauses.push(FunctionArgumentClause::Separator(self.parse_value()?)); } if let Some(on_overflow) = self.parse_listagg_on_overflow()? 
{ clauses.push(FunctionArgumentClause::OnOverflow(on_overflow)); } - if let Some(null_clause) = self.parse_json_null_clause() { - clauses.push(FunctionArgumentClause::JsonNullClause(null_clause)); - } - self.expect_token(&Token::RParen)?; Ok(FunctionArgumentList { duplicate_treatment, @@ -14970,19 +11241,8 @@ impl<'a> Parser<'a> { }) } - /// Parses MSSQL's json-null-clause - fn parse_json_null_clause(&mut self) -> Option { - if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) { - Some(JsonNullClause::AbsentOnNull) - } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) { - Some(JsonNullClause::NullOnNull) - } else { - None - } - } - fn parse_duplicate_treatment(&mut self) -> Result, ParserError> { - let loc = self.peek_token().span.start; + let loc = self.peek_token().location; match ( self.parse_keyword(Keyword::ALL), self.parse_keyword(Keyword::DISTINCT), @@ -14996,25 +11256,18 @@ impl<'a> Parser<'a> { /// Parse a comma-delimited list of projections after SELECT pub fn parse_select_item(&mut self) -> Result { - let prefix = self - .parse_one_of_keywords( - self.dialect - .get_reserved_keywords_for_select_item_operator(), - ) - .map(|keyword| Ident::new(format!("{keyword:?}"))); - match self.parse_wildcard_expr()? { - Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(prefix), - self.parse_wildcard_additional_options(token.0)?, + Expr::QualifiedWildcard(prefix) => Ok(SelectItem::QualifiedWildcard( + prefix, + self.parse_wildcard_additional_options()?, )), - Expr::Wildcard(token) => Ok(SelectItem::Wildcard( - self.parse_wildcard_additional_options(token.0)?, + Expr::Wildcard => Ok(SelectItem::Wildcard( + self.parse_wildcard_additional_options()?, )), Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => { parser_err!( format!("Expected an expression, found: {}", v), - self.peek_token().span.start + self.peek_token().location ) } Expr::BinaryOp { @@ -15027,7 +11280,7 @@ impl<'a> Parser<'a> { let Expr::Identifier(alias) = *left else { return parser_err!( "BUG: expected identifier expression as alias", - self.peek_token().span.start + self.peek_token().location ); }; Ok(SelectItem::ExprWithAlias { @@ -15035,23 +11288,11 @@ impl<'a> Parser<'a> { alias, }) } - expr if self.dialect.supports_select_expr_star() - && self.consume_tokens(&[Token::Period, Token::Mul]) => - { - let wildcard_token = self.get_previous_token().clone(); - Ok(SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::Expr(expr), - self.parse_wildcard_additional_options(wildcard_token)?, - )) - } expr => self - .maybe_parse_select_item_alias() + .parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) .map(|alias| match alias { - Some(alias) => SelectItem::ExprWithAlias { - expr: maybe_prefixed_expr(expr, prefix), - alias, - }, - None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)), + Some(alias) => SelectItem::ExprWithAlias { expr, alias }, + None => SelectItem::UnnamedExpr(expr), }), } } @@ -15061,14 +11302,14 @@ impl<'a> Parser<'a> { /// If it is not possible to parse it, will return an option. pub fn parse_wildcard_additional_options( &mut self, - wildcard_token: TokenWithSpan, ) -> Result { let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) { self.parse_optional_select_item_ilike()? 
} else { None }; - let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude() + let opt_exclude = if opt_ilike.is_none() + && dialect_of!(self is GenericDialect | DuckDbDialect | SnowflakeDialect) { self.parse_optional_select_item_exclude()? } else { @@ -15092,7 +11333,6 @@ impl<'a> Parser<'a> { }; Ok(WildcardAdditionalOptions { - wildcard_token: wildcard_token.into(), opt_ilike, opt_exclude, opt_except, @@ -15128,11 +11368,12 @@ impl<'a> Parser<'a> { ) -> Result, ParserError> { let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) { if self.consume_token(&Token::LParen) { - let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?; + let columns = + self.parse_comma_separated(|parser| parser.parse_identifier(false))?; self.expect_token(&Token::RParen)?; Some(ExcludeSelectItem::Multiple(columns)) } else { - let column = self.parse_identifier()?; + let column = self.parse_identifier(false)?; Some(ExcludeSelectItem::Single(column)) } } else { @@ -15165,7 +11406,7 @@ impl<'a> Parser<'a> { } } else { // Clickhouse allows EXCEPT column_name - let ident = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; Some(ExceptSelectItem { first_element: ident, additional_elements: vec![], @@ -15223,7 +11464,7 @@ impl<'a> Parser<'a> { pub fn parse_replace_elements(&mut self) -> Result { let expr = self.parse_expr()?; let as_keyword = self.parse_keyword(Keyword::AS); - let ident = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; Ok(ReplaceSelectElement { expr, column_name: ident, @@ -15243,63 +11484,10 @@ impl<'a> Parser<'a> { } } - /// Parse an [OrderByExpr] expression. + /// Parse an expression, optionally followed by ASC or DESC (used in ORDER BY) pub fn parse_order_by_expr(&mut self) -> Result { - self.parse_order_by_expr_inner(false) - .map(|(order_by, _)| order_by) - } - - /// Parse an [IndexColumn]. - pub fn parse_create_index_expr(&mut self) -> Result { - self.parse_order_by_expr_inner(true) - .map(|(column, operator_class)| IndexColumn { - column, - operator_class, - }) - } - - fn parse_order_by_expr_inner( - &mut self, - with_operator_class: bool, - ) -> Result<(OrderByExpr, Option), ParserError> { let expr = self.parse_expr()?; - let operator_class: Option = if with_operator_class { - // We check that if non of the following keywords are present, then we parse an - // identifier as operator class. - if self - .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH]) - .is_some() - { - None - } else { - self.maybe_parse(|parser| parser.parse_identifier())? - } - } else { - None - }; - - let options = self.parse_order_by_options()?; - - let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect) - && self.parse_keywords(&[Keyword::WITH, Keyword::FILL]) - { - Some(self.parse_with_fill()?) - } else { - None - }; - - Ok(( - OrderByExpr { - expr, - options, - with_fill, - }, - operator_class, - )) - } - - fn parse_order_by_options(&mut self) -> Result { let asc = self.parse_asc_desc(); let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) { @@ -15310,7 +11498,20 @@ impl<'a> Parser<'a> { None }; - Ok(OrderByOptions { asc, nulls_first }) + let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect) + && self.parse_keywords(&[Keyword::WITH, Keyword::FILL]) + { + Some(self.parse_with_fill()?) 
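`parse_wildcard_additional_options` and `parse_with_fill` exist on both sides; the newer side routes the wildcard `EXCLUDE` check through a dialect method instead of a hard-coded dialect list. A sketch of the two clauses, with illustrative column names:

```rust
use sqlparser::dialect::{ClickHouseDialect, SnowflakeDialect};
use sqlparser::parser::Parser;

fn main() {
    // Snowflake-style wildcard modifier handled by parse_optional_select_item_exclude.
    let exclude = "SELECT * EXCLUDE (ssn) FROM people";
    println!("{:?}", Parser::parse_sql(&SnowflakeDialect {}, exclude));

    // ClickHouse ORDER BY ... WITH FILL, handled by parse_with_fill.
    let fill = "SELECT d FROM metrics ORDER BY d WITH FILL FROM 1 TO 10 STEP 1";
    println!("{:?}", Parser::parse_sql(&ClickHouseDialect {}, fill));
}
```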
+ } else { + None + }; + + Ok(OrderByExpr { + expr, + asc, + nulls_first, + with_fill, + }) } // Parse a WITH FILL clause (ClickHouse dialect) @@ -15337,7 +11538,7 @@ impl<'a> Parser<'a> { Ok(WithFill { from, to, step }) } - // Parse a set of comma separated INTERPOLATE expressions (ClickHouse dialect) + // Parse a set of comma seperated INTERPOLATE expressions (ClickHouse dialect) // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier pub fn parse_interpolations(&mut self) -> Result, ParserError> { if !self.parse_keyword(Keyword::INTERPOLATE) { @@ -15360,7 +11561,7 @@ impl<'a> Parser<'a> { // Parse a INTERPOLATE expression (ClickHouse dialect) pub fn parse_interpolation(&mut self) -> Result { - let column = self.parse_identifier()?; + let column = self.parse_identifier(false)?; let expr = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) } else { @@ -15379,7 +11580,7 @@ impl<'a> Parser<'a> { } else { let next_token = self.next_token(); let quantity = match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.span.start)?, + Token::Number(s, _) => Self::parse::(s, next_token.location)?, _ => self.expected("literal int", next_token)?, }; Some(TopQuantity::Constant(quantity)) @@ -15420,8 +11621,7 @@ impl<'a> Parser<'a> { /// Parse a FETCH clause pub fn parse_fetch(&mut self) -> Result { - let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]); - + self.expect_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT])?; let (quantity, percent) = if self .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]) .is_some() @@ -15430,16 +11630,16 @@ impl<'a> Parser<'a> { } else { let quantity = Expr::Value(self.parse_value()?); let percent = self.parse_keyword(Keyword::PERCENT); - let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]); + self.expect_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])?; (Some(quantity), percent) }; - let with_ties = if self.parse_keyword(Keyword::ONLY) { false + } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) { + true } else { - self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) + return self.expected("one of ONLY or WITH TIES", self.peek_token()); }; - Ok(Fetch { with_ties, percent, @@ -15495,15 +11695,11 @@ impl<'a> Parser<'a> { } pub fn parse_start_transaction(&mut self) -> Result { - self.expect_keyword_is(Keyword::TRANSACTION)?; + self.expect_keyword(Keyword::TRANSACTION)?; Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, begin: false, - transaction: Some(BeginTransactionKind::Transaction), modifier: None, - statements: vec![], - exception: None, - has_end_keyword: false, }) } @@ -15516,88 +11712,20 @@ impl<'a> Parser<'a> { Some(TransactionModifier::Immediate) } else if self.parse_keyword(Keyword::EXCLUSIVE) { Some(TransactionModifier::Exclusive) - } else if self.parse_keyword(Keyword::TRY) { - Some(TransactionModifier::Try) - } else if self.parse_keyword(Keyword::CATCH) { - Some(TransactionModifier::Catch) } else { None }; - let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) { - Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction), - Some(Keyword::WORK) => Some(BeginTransactionKind::Work), - _ => None, - }; + let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, begin: true, - transaction, modifier, - statements: vec![], - exception: None, - has_end_keyword: false, - }) - } - - pub fn 
parse_begin_exception_end(&mut self) -> Result { - let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?; - - let exception = if self.parse_keyword(Keyword::EXCEPTION) { - let mut when = Vec::new(); - - // We can have multiple `WHEN` arms so we consume all cases until `END` - while !self.peek_keyword(Keyword::END) { - self.expect_keyword(Keyword::WHEN)?; - - // Each `WHEN` case can have one or more conditions, e.g. - // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN - // So we parse identifiers until the `THEN` keyword. - let mut idents = Vec::new(); - - while !self.parse_keyword(Keyword::THEN) { - let ident = self.parse_identifier()?; - idents.push(ident); - - self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?; - } - - let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?; - - when.push(ExceptionWhen { idents, statements }); - } - - Some(when) - } else { - None - }; - - self.expect_keyword(Keyword::END)?; - - Ok(Statement::StartTransaction { - begin: true, - statements, - exception, - has_end_keyword: true, - transaction: None, - modifier: None, - modes: Default::default(), }) } pub fn parse_end(&mut self) -> Result { - let modifier = if !self.dialect.supports_end_transaction_modifier() { - None - } else if self.parse_keyword(Keyword::TRY) { - Some(TransactionModifier::Try) - } else if self.parse_keyword(Keyword::CATCH) { - Some(TransactionModifier::Catch) - } else { - None - }; Ok(Statement::Commit { chain: self.parse_commit_rollback_chain()?, - end: true, - modifier, }) } @@ -15614,8 +11742,6 @@ impl<'a> Parser<'a> { TransactionIsolationLevel::RepeatableRead } else if self.parse_keyword(Keyword::SERIALIZABLE) { TransactionIsolationLevel::Serializable - } else if self.parse_keyword(Keyword::SNAPSHOT) { - TransactionIsolationLevel::Snapshot } else { self.expected("isolation level", self.peek_token())? }; @@ -15642,8 +11768,6 @@ impl<'a> Parser<'a> { pub fn parse_commit(&mut self) -> Result { Ok(Statement::Commit { chain: self.parse_commit_rollback_chain()?, - end: false, - modifier: None, }) } @@ -15658,7 +11782,7 @@ impl<'a> Parser<'a> { let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); if self.parse_keyword(Keyword::AND) { let chain = !self.parse_keyword(Keyword::NO); - self.expect_keyword_is(Keyword::CHAIN)?; + self.expect_keyword(Keyword::CHAIN)?; Ok(chain) } else { Ok(false) @@ -15668,7 +11792,7 @@ impl<'a> Parser<'a> { pub fn parse_rollback_savepoint(&mut self) -> Result, ParserError> { if self.parse_keyword(Keyword::TO) { let _ = self.parse_keyword(Keyword::SAVEPOINT); - let savepoint = self.parse_identifier()?; + let savepoint = self.parse_identifier(false)?; Ok(Some(savepoint)) } else { @@ -15676,101 +11800,39 @@ impl<'a> Parser<'a> { } } - /// Parse a 'RAISERROR' statement - pub fn parse_raiserror(&mut self) -> Result { - self.expect_token(&Token::LParen)?; - let message = Box::new(self.parse_expr()?); - self.expect_token(&Token::Comma)?; - let severity = Box::new(self.parse_expr()?); - self.expect_token(&Token::Comma)?; - let state = Box::new(self.parse_expr()?); - let arguments = if self.consume_token(&Token::Comma) { - self.parse_comma_separated(Parser::parse_expr)? - } else { - vec![] - }; - self.expect_token(&Token::RParen)?; - let options = if self.parse_keyword(Keyword::WITH) { - self.parse_comma_separated(Parser::parse_raiserror_option)? 
- } else { - vec![] - }; - Ok(Statement::RaisError { - message, - severity, - state, - arguments, - options, - }) - } - - pub fn parse_raiserror_option(&mut self) -> Result { - match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? { - Keyword::LOG => Ok(RaisErrorOption::Log), - Keyword::NOWAIT => Ok(RaisErrorOption::NoWait), - Keyword::SETERROR => Ok(RaisErrorOption::SetError), - _ => self.expected( - "LOG, NOWAIT OR SETERROR raiserror option", - self.peek_token(), - ), - } - } - pub fn parse_deallocate(&mut self) -> Result { let prepare = self.parse_keyword(Keyword::PREPARE); - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; Ok(Statement::Deallocate { name, prepare }) } pub fn parse_execute(&mut self) -> Result { - let name = if self.dialect.supports_execute_immediate() - && self.parse_keyword(Keyword::IMMEDIATE) - { - None - } else { - let name = self.parse_object_name(false)?; - Some(name) - }; + let name = self.parse_identifier(false)?; - let has_parentheses = self.consume_token(&Token::LParen); - - let end_token = match (has_parentheses, self.peek_token().token) { - (true, _) => Token::RParen, - (false, Token::EOF) => Token::EOF, - (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w), - (false, _) => Token::SemiColon, - }; - - let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?; - - if has_parentheses { + let mut parameters = vec![]; + if self.consume_token(&Token::LParen) { + parameters = self.parse_comma_separated(Parser::parse_expr)?; self.expect_token(&Token::RParen)?; } - let into = if self.parse_keyword(Keyword::INTO) { - self.parse_comma_separated(Self::parse_identifier)? - } else { - vec![] - }; + let mut using = vec![]; + if self.parse_keyword(Keyword::USING) { + using.push(self.parse_expr()?); - let using = if self.parse_keyword(Keyword::USING) { - self.parse_comma_separated(Self::parse_expr_with_alias)? 
- } else { - vec![] + while self.consume_token(&Token::Comma) { + using.push(self.parse_expr()?); + } }; Ok(Statement::Execute { - immediate: name.is_none(), name, parameters, - has_parentheses, - into, using, }) } pub fn parse_prepare(&mut self) -> Result { - let name = self.parse_identifier()?; + let name = self.parse_identifier(false)?; let mut data_types = vec![]; if self.consume_token(&Token::LParen) { @@ -15778,7 +11840,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::RParen)?; } - self.expect_keyword_is(Keyword::AS)?; + self.expect_keyword(Keyword::AS)?; let statement = Box::new(self.parse_statement()?); Ok(Statement::Prepare { name, @@ -15792,8 +11854,8 @@ impl<'a> Parser<'a> { let query = self.parse_query()?; self.expect_token(&Token::RParen)?; - self.expect_keyword_is(Keyword::TO)?; - let to = self.parse_identifier()?; + self.expect_keyword(Keyword::TO)?; + let to = self.parse_identifier(false)?; let with_options = self.parse_options(Keyword::WITH)?; @@ -15807,15 +11869,16 @@ impl<'a> Parser<'a> { pub fn parse_merge_clauses(&mut self) -> Result, ParserError> { let mut clauses = vec![]; loop { - if !(self.parse_keyword(Keyword::WHEN)) { + if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon { break; } + self.expect_keyword(Keyword::WHEN)?; let mut clause_kind = MergeClauseKind::Matched; if self.parse_keyword(Keyword::NOT) { clause_kind = MergeClauseKind::NotMatched; } - self.expect_keyword_is(Keyword::MATCHED)?; + self.expect_keyword(Keyword::MATCHED)?; if matches!(clause_kind, MergeClauseKind::NotMatched) && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE]) @@ -15833,7 +11896,7 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword_is(Keyword::THEN)?; + self.expect_keyword(Keyword::THEN)?; let merge_clause = match self.parse_one_of_keywords(&[ Keyword::UPDATE, @@ -15849,7 +11912,7 @@ impl<'a> Parser<'a> { "UPDATE is not allowed in a {clause_kind} merge clause" ))); } - self.expect_keyword_is(Keyword::SET)?; + self.expect_keyword(Keyword::SET)?; MergeAction::Update { assignments: self.parse_comma_separated(Parser::parse_assignment)?, } @@ -15882,7 +11945,7 @@ impl<'a> Parser<'a> { { MergeInsertKind::Row } else { - self.expect_keyword_is(Keyword::VALUES)?; + self.expect_keyword(Keyword::VALUES)?; let values = self.parse_values(is_mysql)?; MergeInsertKind::Values(values) }; @@ -15903,49 +11966,16 @@ impl<'a> Parser<'a> { Ok(clauses) } - fn parse_output(&mut self) -> Result { - self.expect_keyword_is(Keyword::OUTPUT)?; - let select_items = self.parse_projection()?; - self.expect_keyword_is(Keyword::INTO)?; - let into_table = self.parse_select_into()?; - - Ok(OutputClause { - select_items, - into_table, - }) - } - - fn parse_select_into(&mut self) -> Result { - let temporary = self - .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) - .is_some(); - let unlogged = self.parse_keyword(Keyword::UNLOGGED); - let table = self.parse_keyword(Keyword::TABLE); - let name = self.parse_object_name(false)?; - - Ok(SelectInto { - temporary, - unlogged, - table, - name, - }) - } - pub fn parse_merge(&mut self) -> Result { let into = self.parse_keyword(Keyword::INTO); let table = self.parse_table_factor()?; - self.expect_keyword_is(Keyword::USING)?; + self.expect_keyword(Keyword::USING)?; let source = self.parse_table_factor()?; - self.expect_keyword_is(Keyword::ON)?; + self.expect_keyword(Keyword::ON)?; let on = self.parse_expr()?; let clauses = self.parse_merge_clauses()?; - let output = if self.peek_keyword(Keyword::OUTPUT) { - Some(self.parse_output()?) 
- } else { - None - }; Ok(Statement::Merge { into, @@ -15953,12 +11983,11 @@ impl<'a> Parser<'a> { source, on: Box::new(on), clauses, - output, }) } fn parse_pragma_value(&mut self) -> Result { - match self.parse_value()?.value { + match self.parse_value()? { v @ Value::SingleQuotedString(_) => Ok(v), v @ Value::DoubleQuotedString(_) => Ok(v), v @ Value::Number(_, _) => Ok(v), @@ -15998,40 +12027,15 @@ impl<'a> Parser<'a> { /// `INSTALL [extension_name]` pub fn parse_install(&mut self) -> Result { - let extension_name = self.parse_identifier()?; + let extension_name = self.parse_identifier(false)?; Ok(Statement::Install { extension_name }) } - /// Parse a SQL LOAD statement + /// `LOAD [extension_name]` pub fn parse_load(&mut self) -> Result { - if self.dialect.supports_load_extension() { - let extension_name = self.parse_identifier()?; - Ok(Statement::Load { extension_name }) - } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() { - let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some(); - self.expect_keyword_is(Keyword::INPATH)?; - let inpath = self.parse_literal_string()?; - let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some(); - self.expect_keyword_is(Keyword::INTO)?; - self.expect_keyword_is(Keyword::TABLE)?; - let table_name = self.parse_object_name(false)?; - let partitioned = self.parse_insert_partition()?; - let table_format = self.parse_load_data_table_format()?; - Ok(Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - }) - } else { - self.expected( - "`DATA` or an extension name after `LOAD`", - self.peek_token(), - ) - } + let extension_name = self.parse_identifier(false)?; + Ok(Statement::Load { extension_name }) } /// ```sql @@ -16039,13 +12043,13 @@ impl<'a> Parser<'a> { /// ``` /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize) pub fn parse_optimize_table(&mut self) -> Result { - self.expect_keyword_is(Keyword::TABLE)?; + self.expect_keyword(Keyword::TABLE)?; let name = self.parse_object_name(false)?; let on_cluster = self.parse_optional_on_cluster()?; let partition = if self.parse_keyword(Keyword::PARTITION) { if self.parse_keyword(Keyword::ID) { - Some(Partition::Identifier(self.parse_identifier()?)) + Some(Partition::Identifier(self.parse_identifier(false)?)) } else { Some(Partition::Expr(self.parse_expr()?)) } @@ -16092,7 +12096,7 @@ impl<'a> Parser<'a> { // [ OWNED BY { table_name.column_name | NONE } ] let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) { if self.parse_keywords(&[Keyword::NONE]) { - Some(ObjectName::from(vec![Ident::new("NONE")])) + Some(ObjectName(vec![Ident::new("NONE")])) } else { Some(self.parse_object_name(false)?) } @@ -16154,62 +12158,19 @@ impl<'a> Parser<'a> { Ok(sequence_options) } - /// Parse a `CREATE SERVER` statement. - /// - /// See [Statement::CreateServer] - pub fn parse_pg_create_server(&mut self) -> Result { - let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_object_name(false)?; - - let server_type = if self.parse_keyword(Keyword::TYPE) { - Some(self.parse_identifier()?) - } else { - None - }; - - let version = if self.parse_keyword(Keyword::VERSION) { - Some(self.parse_identifier()?) 
- } else { - None - }; - - self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?; - let foreign_data_wrapper = self.parse_object_name(false)?; - - let mut options = None; - if self.parse_keyword(Keyword::OPTIONS) { - self.expect_token(&Token::LParen)?; - options = Some(self.parse_comma_separated(|p| { - let key = p.parse_identifier()?; - let value = p.parse_identifier()?; - Ok(CreateServerOption { key, value }) - })?); - self.expect_token(&Token::RParen)?; - } - - Ok(Statement::CreateServer(CreateServerStatement { - name, - if_not_exists: ine, - server_type, - version, - foreign_data_wrapper, - options, - })) - } - /// The index of the first unprocessed token. pub fn index(&self) -> usize { self.index } pub fn parse_named_window(&mut self) -> Result { - let ident = self.parse_identifier()?; - self.expect_keyword_is(Keyword::AS)?; + let ident = self.parse_identifier(false)?; + self.expect_keyword(Keyword::AS)?; let window_expr = if self.consume_token(&Token::LParen) { NamedWindowExpr::WindowSpec(self.parse_window_spec()?) } else if self.dialect.supports_window_clause_named_window_reference() { - NamedWindowExpr::NamedWindow(self.parse_identifier()?) + NamedWindowExpr::NamedWindow(self.parse_identifier(false)?) } else { return self.expected("(", self.peek_token()); }; @@ -16220,30 +12181,22 @@ impl<'a> Parser<'a> { pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result { let name = self.parse_object_name(false)?; let params = self.parse_optional_procedure_parameters()?; - - let language = if self.parse_keyword(Keyword::LANGUAGE) { - Some(self.parse_identifier()?) - } else { - None - }; - - self.expect_keyword_is(Keyword::AS)?; - - let body = self.parse_conditional_statements(&[Keyword::END])?; - + self.expect_keyword(Keyword::AS)?; + self.expect_keyword(Keyword::BEGIN)?; + let statements = self.parse_statements()?; + self.expect_keyword(Keyword::END)?; Ok(Statement::CreateProcedure { name, or_alter, params, - language, - body, + body: statements, }) } pub fn parse_window_spec(&mut self) -> Result { let window_name = match self.peek_token().token { Token::Word(word) if word.keyword == Keyword::NoKeyword => { - self.parse_optional_ident()? + self.parse_optional_indent()? } _ => None, }; @@ -16276,11 +12229,7 @@ impl<'a> Parser<'a> { pub fn parse_create_type(&mut self) -> Result { let name = self.parse_object_name(false)?; - self.expect_keyword_is(Keyword::AS)?; - - if self.parse_keyword(Keyword::ENUM) { - return self.parse_create_type_enum(name); - } + self.expect_keyword(Keyword::AS)?; let mut attributes = vec![]; if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { @@ -16291,7 +12240,7 @@ impl<'a> Parser<'a> { } loop { - let attr_name = self.parse_identifier()?; + let attr_name = self.parse_identifier(false)?; let attr_data_type = self.parse_data_type()?; let attr_collation = if self.parse_keyword(Keyword::COLLATE) { Some(self.parse_object_name(false)?) 
@@ -16318,25 +12267,11 @@ impl<'a> Parser<'a> { }) } - /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type]) - /// - /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) - pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result { - self.expect_token(&Token::LParen)?; - let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; - self.expect_token(&Token::RParen)?; - - Ok(Statement::CreateType { - name, - representation: UserDefinedTypeRepresentation::Enum { labels }, - }) - } - fn parse_parenthesized_identifiers(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; - let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; + let partitions = self.parse_comma_separated(|p| p.parse_identifier(false))?; self.expect_token(&Token::RParen)?; - Ok(idents) + Ok(partitions) } fn parse_column_position(&mut self) -> Result, ParserError> { @@ -16344,7 +12279,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::FIRST) { Ok(Some(MySQLColumnPosition::First)) } else if self.parse_keyword(Keyword::AFTER) { - let ident = self.parse_identifier()?; + let ident = self.parse_identifier(false)?; Ok(Some(MySQLColumnPosition::After(ident))) } else { Ok(None) @@ -16354,25 +12289,8 @@ impl<'a> Parser<'a> { } } - /// Parse [Statement::Print] - fn parse_print(&mut self) -> Result { - Ok(Statement::Print(PrintStatement { - message: Box::new(self.parse_expr()?), - })) - } - - /// Parse [Statement::Return] - fn parse_return(&mut self) -> Result { - match self.maybe_parse(|p| p.parse_expr())? { - Some(expr) => Ok(Statement::Return(ReturnStatement { - value: Some(ReturnStatementValue::Expr(expr)), - })), - None => Ok(Statement::Return(ReturnStatement { value: None })), - } - } - /// Consume the parser and return its underlying token buffer - pub fn into_tokens(self) -> Vec { + pub fn into_tokens(self) -> Vec { self.tokens } @@ -16387,155 +12305,13 @@ impl<'a> Parser<'a> { } false } - - pub(crate) fn parse_show_stmt_options(&mut self) -> Result { - let show_in; - let mut filter_position = None; - if self.dialect.supports_show_like_before_in() { - if let Some(filter) = self.parse_show_statement_filter()? { - filter_position = Some(ShowStatementFilterPosition::Infix(filter)); - } - show_in = self.maybe_parse_show_stmt_in()?; - } else { - show_in = self.maybe_parse_show_stmt_in()?; - if let Some(filter) = self.parse_show_statement_filter()? 
{ - filter_position = Some(ShowStatementFilterPosition::Suffix(filter)); - } - } - let starts_with = self.maybe_parse_show_stmt_starts_with()?; - let limit = self.maybe_parse_show_stmt_limit()?; - let from = self.maybe_parse_show_stmt_from()?; - Ok(ShowStatementOptions { - filter_position, - show_in, - starts_with, - limit, - limit_from: from, - }) - } - - fn maybe_parse_show_stmt_in(&mut self) -> Result, ParserError> { - let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { - Some(Keyword::FROM) => ShowStatementInClause::FROM, - Some(Keyword::IN) => ShowStatementInClause::IN, - None => return Ok(None), - _ => return self.expected("FROM or IN", self.peek_token()), - }; - - let (parent_type, parent_name) = match self.parse_one_of_keywords(&[ - Keyword::ACCOUNT, - Keyword::DATABASE, - Keyword::SCHEMA, - Keyword::TABLE, - Keyword::VIEW, - ]) { - // If we see these next keywords it means we don't have a parent name - Some(Keyword::DATABASE) - if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH]) - | self.peek_keyword(Keyword::LIMIT) => - { - (Some(ShowStatementInParentType::Database), None) - } - Some(Keyword::SCHEMA) - if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH]) - | self.peek_keyword(Keyword::LIMIT) => - { - (Some(ShowStatementInParentType::Schema), None) - } - Some(parent_kw) => { - // The parent name here is still optional, for example: - // SHOW TABLES IN ACCOUNT, so parsing the object name - // may fail because the statement ends. - let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?; - match parent_kw { - Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name), - Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name), - Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name), - Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name), - Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name), - _ => { - return self.expected( - "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW", - self.peek_token(), - ) - } - } - } - None => { - // Parsing MySQL style FROM tbl_name FROM db_name - // which is equivalent to FROM tbl_name.db_name - let mut parent_name = self.parse_object_name(false)?; - if self - .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) - .is_some() - { - parent_name - .0 - .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?)); - } - (None, Some(parent_name)) - } - }; - - Ok(Some(ShowStatementIn { - clause, - parent_type, - parent_name, - })) - } - - fn maybe_parse_show_stmt_starts_with(&mut self) -> Result, ParserError> { - if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) { - Ok(Some(self.parse_value()?.value)) - } else { - Ok(None) - } - } - - fn maybe_parse_show_stmt_limit(&mut self) -> Result, ParserError> { - if self.parse_keyword(Keyword::LIMIT) { - Ok(self.parse_limit()?) 
- } else { - Ok(None) - } - } - - fn maybe_parse_show_stmt_from(&mut self) -> Result, ParserError> { - if self.parse_keyword(Keyword::FROM) { - Ok(Some(self.parse_value()?.value)) - } else { - Ok(None) - } - } -} - -fn maybe_prefixed_expr(expr: Expr, prefix: Option) -> Expr { - if let Some(prefix) = prefix { - Expr::Prefixed { - prefix, - value: Box::new(expr), - } - } else { - expr - } } impl Word { - #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")] - pub fn to_ident(&self, span: Span) -> Ident { + pub fn to_ident(&self) -> Ident { Ident { value: self.value.clone(), quote_style: self.quote_style, - span, - } - } - - /// Convert this word into an [`Ident`] identifier - pub fn into_ident(self, span: Span) -> Ident { - Ident { - value: self.value, - quote_style: self.quote_style, - span, } } } @@ -16798,14 +12574,14 @@ mod tests { test_parse_data_type!( dialect, "GEOMETRY", - DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![]) + DataType::Custom(ObjectName(vec!["GEOMETRY".into()]), vec![]) ); test_parse_data_type!( dialect, "GEOMETRY(POINT)", DataType::Custom( - ObjectName::from(vec!["GEOMETRY".into()]), + ObjectName(vec!["GEOMETRY".into()]), vec!["POINT".to_string()] ) ); @@ -16814,7 +12590,7 @@ mod tests { dialect, "GEOMETRY(POINT, 4326)", DataType::Custom( - ObjectName::from(vec!["GEOMETRY".into()]), + ObjectName(vec!["GEOMETRY".into()]), vec!["POINT".to_string(), "4326".to_string()] ) ); @@ -16950,7 +12726,7 @@ mod tests { }}; } - let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]); + let dummy_name = ObjectName(vec![Ident::new("dummy_name")]); let dummy_authorization = Ident::new("dummy_authorization"); test_parse_schema_name!( @@ -16982,20 +12758,6 @@ mod tests { }}; } - fn mk_expected_col(name: &str) -> IndexColumn { - IndexColumn { - column: OrderByExpr { - expr: Expr::Identifier(name.into()), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }, - operator_class: None, - } - } - let dialect = TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]); @@ -17006,7 +12768,7 @@ mod tests { display_as_key: false, name: None, index_type: None, - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); @@ -17017,7 +12779,7 @@ mod tests { display_as_key: true, name: None, index_type: None, - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); @@ -17028,7 +12790,7 @@ mod tests { display_as_key: false, name: Some(Ident::with_quote('\'', "index")), index_type: None, - columns: vec![mk_expected_col("c1"), mk_expected_col("c2")], + columns: vec![Ident::new("c1"), Ident::new("c2")], } ); @@ -17039,7 +12801,7 @@ mod tests { display_as_key: false, name: None, index_type: Some(IndexType::BTree), - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); @@ -17050,7 +12812,7 @@ mod tests { display_as_key: false, name: None, index_type: Some(IndexType::Hash), - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); @@ -17061,7 +12823,7 @@ mod tests { display_as_key: false, name: Some(Ident::new("idx_name")), index_type: Some(IndexType::BTree), - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); @@ -17072,7 +12834,7 @@ mod tests { display_as_key: false, name: Some(Ident::new("idx_name")), index_type: Some(IndexType::Hash), - columns: vec![mk_expected_col("c1")], + columns: vec![Ident::new("c1")], } ); } @@ -17096,7 +12858,7 @@ mod tests { assert_eq!( ast, 
Err(ParserError::ParserError( - "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16" + "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a at Line: 1, Column: 16" .to_string() )) ); @@ -17123,17 +12885,14 @@ mod tests { Ident { value: "CATALOG".to_string(), quote_style: None, - span: Span::empty(), }, Ident { value: "F(o)o. \"bar".to_string(), quote_style: Some('"'), - span: Span::empty(), }, Ident { value: "table".to_string(), quote_style: None, - span: Span::empty(), }, ]; dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| { @@ -17146,12 +12905,10 @@ mod tests { Ident { value: "CATALOG".to_string(), quote_style: None, - span: Span::empty(), }, Ident { value: "table".to_string(), quote_style: None, - span: Span::empty(), }, ]; dialect.run_parser_method("CATALOG . table", |parser| { @@ -17229,6 +12986,16 @@ mod tests { assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err()); } + #[test] + fn test_replace_into_set() { + // NOTE: This is actually valid MySQL syntax, REPLACE and INSERT, + // but the parser does not yet support it. + // https://dev.mysql.com/doc/refman/8.3/en/insert.html + let sql = "REPLACE INTO t SET a='1'"; + + assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err()); + } + #[test] fn test_replace_into_set_placeholder() { let sql = "REPLACE INTO t SET ?"; diff --git a/src/test_utils.rs b/src/test_utils.rs index 654f2723..b35fc45c 100644 --- a/src/test_utils.rs +++ b/src/test_utils.rs @@ -33,7 +33,7 @@ use core::fmt::Debug; use crate::dialect::*; use crate::parser::{Parser, ParserError}; -use crate::tokenizer::{Token, Tokenizer}; +use crate::tokenizer::Tokenizer; use crate::{ast::*, parser::ParserOptions}; #[cfg(test)] @@ -151,11 +151,10 @@ impl TestedDialects { /// /// 2. re-serializing the result of parsing `sql` produces the same /// `canonical` sql string - /// - /// For multiple statements, use [`statements_parse_to`]. pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement { let mut statements = self.parse_sql_statements(sql).expect(sql); assert_eq!(statements.len(), 1); + if !canonical.is_empty() && sql != canonical { assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements); } @@ -168,24 +167,6 @@ impl TestedDialects { only_statement } - /// The same as [`one_statement_parses_to`] but it works for a multiple statements - pub fn statements_parse_to(&self, sql: &str, canonical: &str) -> Vec { - let statements = self.parse_sql_statements(sql).expect(sql); - if !canonical.is_empty() && sql != canonical { - assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements); - } else { - assert_eq!( - sql, - statements - .iter() - .map(|s| s.to_string()) - .collect::>() - .join("; ") - ); - } - statements - } - /// Ensures that `sql` parses as an [`Expr`], and that /// re-serializing the parse result produces canonical pub fn expr_parses_to(&self, sql: &str, canonical: &str) -> Expr { @@ -257,22 +238,6 @@ impl TestedDialects { pub fn verified_expr(&self, sql: &str) -> Expr { self.expr_parses_to(sql, sql) } - - /// Check that the tokenizer returns the expected tokens for the given SQL. 
- pub fn tokenizes_to(&self, sql: &str, expected: Vec) { - if self.dialects.is_empty() { - panic!("No dialects to test"); - } - - self.dialects.iter().for_each(|dialect| { - let mut tokenizer = Tokenizer::new(&**dialect, sql); - if let Some(options) = &self.options { - tokenizer = tokenizer.with_unescape(options.unescape); - } - let tokens = tokenizer.tokenize().unwrap(); - assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}"); - }); - } } /// Returns all available dialects. @@ -294,11 +259,6 @@ pub fn all_dialects() -> TestedDialects { ]) } -// Returns all available dialects with the specified parser options -pub fn all_dialects_with_options(options: ParserOptions) -> TestedDialects { - TestedDialects::new_with_options(all_dialects().dialects, options) -} - /// Returns all dialects matching the given predicate. pub fn all_dialects_where(predicate: F) -> TestedDialects where @@ -350,12 +310,10 @@ pub fn alter_table_op_with_name(stmt: Statement, expected_name: &str) -> AlterTa operations, on_cluster: _, location: _, - iceberg, } => { assert_eq!(name.to_string(), expected_name); assert!(!if_exists); assert!(!is_only); - assert!(!iceberg); only(operations) } _ => panic!("Expected ALTER TABLE statement"), @@ -371,11 +329,6 @@ pub fn number(n: &str) -> Value { Value::Number(n.parse().unwrap(), false) } -/// Creates a [Value::SingleQuotedString] -pub fn single_quoted_string(s: impl Into) -> Value { - Value::SingleQuotedString(s.into()) -} - pub fn table_alias(name: impl Into) -> Option { Some(TableAlias { name: Ident::new(name), @@ -385,37 +338,19 @@ pub fn table_alias(name: impl Into) -> Option { pub fn table(name: impl Into) -> TableFactor { TableFactor::Table { - name: ObjectName::from(vec![Ident::new(name.into())]), + name: ObjectName(vec![Ident::new(name.into())]), alias: None, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - } -} - -pub fn table_from_name(name: ObjectName) -> TableFactor { - TableFactor::Table { - name, - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], } } pub fn table_with_alias(name: impl Into, alias: impl Into) -> TableFactor { TableFactor::Table { - name: ObjectName::from(vec![Ident::new(name)]), + name: ObjectName(vec![Ident::new(name)]), alias: Some(TableAlias { name: Ident::new(alias), columns: vec![], @@ -425,9 +360,6 @@ pub fn table_with_alias(name: impl Into, alias: impl Into) -> Ta version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], } } @@ -435,14 +367,13 @@ pub fn join(relation: TableFactor) -> Join { Join { relation, global: false, - join_operator: JoinOperator::Join(JoinConstraint::Natural), + join_operator: JoinOperator::Inner(JoinConstraint::Natural), } } pub fn call(function: &str, args: impl IntoIterator) -> Expr { Expr::Function(Function { - name: ObjectName::from(vec![Ident::new(function)]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new(function)]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -458,52 +389,3 @@ pub fn call(function: &str, args: impl IntoIterator) -> Expr { within_group: vec![], }) } - -/// Gets the first index column (mysql calls it a key part) of the first index found in a -/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or 
[`Statement::AlterTable`]. -pub fn index_column(stmt: Statement) -> Expr { - match stmt { - Statement::CreateIndex(CreateIndex { columns, .. }) => { - columns.first().unwrap().column.expr.clone() - } - Statement::CreateTable(CreateTable { constraints, .. }) => { - match constraints.first().unwrap() { - TableConstraint::Index { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::Unique { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::PrimaryKey { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::FulltextOrSpatial { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"), - } - } - Statement::AlterTable { operations, .. } => match operations.first().unwrap() { - AlterTableOperation::AddConstraint { constraint, .. } => { - match constraint { - TableConstraint::Index { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::Unique { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::PrimaryKey { columns, .. } => { - columns.first().unwrap().column.expr.clone() - } - TableConstraint::FulltextOrSpatial { - columns, - .. - } => columns.first().unwrap().column.expr.clone(), - _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"), - } - } - _ => panic!("Expected a constraint"), - }, - _ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"), - } -} diff --git a/src/tokenizer.rs b/src/tokenizer.rs index 8382a534..4186ec82 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -29,10 +29,10 @@ use alloc::{ vec, vec::Vec, }; +use core::fmt; use core::iter::Peekable; use core::num::NonZeroU8; use core::str::Chars; -use core::{cmp, fmt}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -40,13 +40,13 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; +use crate::ast::DollarQuotedString; use crate::dialect::Dialect; use crate::dialect::{ BigQueryDialect, DuckDbDialect, GenericDialect, MySqlDialect, PostgreSqlDialect, SnowflakeDialect, }; use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX}; -use crate::{ast::DollarQuotedString, dialect::HiveDialect}; /// SQL Token enumeration #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -170,10 +170,8 @@ pub enum Token { RBrace, /// Right Arrow `=>` RArrow, - /// Sharp `#` used for PostgreSQL Bitwise XOR operator, also PostgreSQL/Redshift geometrical unary/binary operator (Number of points in path or polygon/Intersection) + /// Sharp `#` used for PostgreSQL Bitwise XOR operator Sharp, - /// `##` PostgreSQL/Redshift geometrical binary operator (Point of closest proximity) - DoubleSharp, /// Tilde `~` used for PostgreSQL Bitwise NOT operator or case sensitive match regular expression operator Tilde, /// `~*` , a case insensitive match regular expression operator in PostgreSQL @@ -200,7 +198,7 @@ pub enum Token { ExclamationMark, /// Double Exclamation Mark `!!` used for PostgreSQL prefix factorial operator DoubleExclamationMark, - /// AtSign `@` used for PostgreSQL abs operator, also PostgreSQL/Redshift geometrical unary/binary operator (Center, Contained or on) + /// AtSign `@` used for PostgreSQL abs operator 
AtSign, /// `^@`, a "starts with" string operator in PostgreSQL CaretAt, @@ -216,38 +214,6 @@ pub enum Token { LongArrow, /// `#>`, extracts JSON sub-object at the specified path HashArrow, - /// `@-@` PostgreSQL/Redshift geometrical unary operator (Length or circumference) - AtDashAt, - /// `?-` PostgreSQL/Redshift geometrical unary/binary operator (Is horizontal?/Are horizontally aligned?) - QuestionMarkDash, - /// `&<` PostgreSQL/Redshift geometrical binary operator (Overlaps to left?) - AmpersandLeftAngleBracket, - /// `&>` PostgreSQL/Redshift geometrical binary operator (Overlaps to right?)` - AmpersandRightAngleBracket, - /// `&<|` PostgreSQL/Redshift geometrical binary operator (Does not extend above?)` - AmpersandLeftAngleBracketVerticalBar, - /// `|&>` PostgreSQL/Redshift geometrical binary operator (Does not extend below?)` - VerticalBarAmpersandRightAngleBracket, - /// `<->` PostgreSQL/Redshift geometrical binary operator (Distance between) - TwoWayArrow, - /// `<^` PostgreSQL/Redshift geometrical binary operator (Is below?) - LeftAngleBracketCaret, - /// `>^` PostgreSQL/Redshift geometrical binary operator (Is above?) - RightAngleBracketCaret, - /// `?#` PostgreSQL/Redshift geometrical binary operator (Intersects or overlaps) - QuestionMarkSharp, - /// `?-|` PostgreSQL/Redshift geometrical binary operator (Is perpendicular?) - QuestionMarkDashVerticalBar, - /// `?||` PostgreSQL/Redshift geometrical binary operator (Are parallel?) - QuestionMarkDoubleVerticalBar, - /// `~=` PostgreSQL/Redshift geometrical binary operator (Same as) - TildeEqual, - /// `<<| PostgreSQL/Redshift geometrical binary operator (Is strictly below?) - ShiftLeftVerticalBar, - /// `|>> PostgreSQL/Redshift geometrical binary operator (Is strictly above?) - VerticalBarShiftRight, - /// `|> BigQuery pipe operator - VerticalBarRightAngleBracket, /// `#>>`, extracts JSON sub-object at the specified path as text HashLongArrow, /// jsonb @> jsonb -> boolean: Test whether left json contains the right json @@ -337,7 +303,6 @@ impl fmt::Display for Token { Token::RBrace => f.write_str("}"), Token::RArrow => f.write_str("=>"), Token::Sharp => f.write_str("#"), - Token::DoubleSharp => f.write_str("##"), Token::ExclamationMark => f.write_str("!"), Token::DoubleExclamationMark => f.write_str("!!"), Token::Tilde => f.write_str("~"), @@ -355,22 +320,6 @@ impl fmt::Display for Token { Token::Overlap => f.write_str("&&"), Token::PGSquareRoot => f.write_str("|/"), Token::PGCubeRoot => f.write_str("||/"), - Token::AtDashAt => f.write_str("@-@"), - Token::QuestionMarkDash => f.write_str("?-"), - Token::AmpersandLeftAngleBracket => f.write_str("&<"), - Token::AmpersandRightAngleBracket => f.write_str("&>"), - Token::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"), - Token::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"), - Token::VerticalBarRightAngleBracket => f.write_str("|>"), - Token::TwoWayArrow => f.write_str("<->"), - Token::LeftAngleBracketCaret => f.write_str("<^"), - Token::RightAngleBracketCaret => f.write_str(">^"), - Token::QuestionMarkSharp => f.write_str("?#"), - Token::QuestionMarkDashVerticalBar => f.write_str("?-|"), - Token::QuestionMarkDoubleVerticalBar => f.write_str("?||"), - Token::TildeEqual => f.write_str("~="), - Token::ShiftLeftVerticalBar => f.write_str("<<|"), - Token::VerticalBarShiftRight => f.write_str("|>>"), Token::Placeholder(ref s) => write!(f, "{s}"), Token::Arrow => write!(f, "->"), Token::LongArrow => write!(f, "->>"), @@ -473,253 +422,61 @@ impl fmt::Display for 
Whitespace { } /// Location in input string -/// -/// # Create an "empty" (unknown) `Location` -/// ``` -/// # use sqlparser::tokenizer::Location; -/// let location = Location::empty(); -/// ``` -/// -/// # Create a `Location` from a line and column -/// ``` -/// # use sqlparser::tokenizer::Location; -/// let location = Location::new(1, 1); -/// ``` -/// -/// # Create a `Location` from a pair -/// ``` -/// # use sqlparser::tokenizer::Location; -/// let location = Location::from((1, 1)); -/// ``` -#[derive(Eq, PartialEq, Hash, Clone, Copy, Ord, PartialOrd)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[derive(Debug, Eq, PartialEq, Clone, Copy)] pub struct Location { - /// Line number, starting from 1. - /// - /// Note: Line 0 is used for empty spans + /// Line number, starting from 1 pub line: u64, - /// Line column, starting from 1. - /// - /// Note: Column 0 is used for empty spans + /// Line column, starting from 1 pub column: u64, } impl fmt::Display for Location { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.line == 0 { return Ok(()); } - write!(f, " at Line: {}, Column: {}", self.line, self.column) + write!( + f, + // TODO: use standard compiler location syntax (::) + " at Line: {}, Column: {}", + self.line, self.column, + ) } } -impl fmt::Debug for Location { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Location({},{})", self.line, self.column) - } -} - -impl Location { - /// Return an "empty" / unknown location - pub fn empty() -> Self { - Self { line: 0, column: 0 } - } - - /// Create a new `Location` for a given line and column - pub fn new(line: u64, column: u64) -> Self { - Self { line, column } - } - - /// Create a new location for a given line and column - /// - /// Alias for [`Self::new`] - // TODO: remove / deprecate in favor of` `new` for consistency? - pub fn of(line: u64, column: u64) -> Self { - Self::new(line, column) - } - - /// Combine self and `end` into a new `Span` - pub fn span_to(self, end: Self) -> Span { - Span { start: self, end } - } -} - -impl From<(u64, u64)> for Location { - fn from((line, column): (u64, u64)) -> Self { - Self { line, column } - } -} - -/// A span represents a linear portion of the input string (start, end) -/// -/// See [Spanned](crate::ast::Spanned) for more information. -#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct Span { - pub start: Location, - pub end: Location, -} - -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Span({:?}..{:?})", self.start, self.end) - } -} - -impl Span { - // An empty span (0, 0) -> (0, 0) - // We need a const instance for pattern matching - const EMPTY: Span = Self::empty(); - - /// Create a new span from a start and end [`Location`] - pub fn new(start: Location, end: Location) -> Span { - Span { start, end } - } - - /// Returns an empty span `(0, 0) -> (0, 0)` - /// - /// Empty spans represent no knowledge of source location - /// See [Spanned](crate::ast::Spanned) for more information. 
- pub const fn empty() -> Span { - Span { - start: Location { line: 0, column: 0 }, - end: Location { line: 0, column: 0 }, - } - } - - /// Returns the smallest Span that contains both `self` and `other` - /// If either span is [Span::empty], the other span is returned - /// - /// # Examples - /// ``` - /// # use sqlparser::tokenizer::{Span, Location}; - /// // line 1, column1 -> line 2, column 5 - /// let span1 = Span::new(Location::new(1, 1), Location::new(2, 5)); - /// // line 2, column 3 -> line 3, column 7 - /// let span2 = Span::new(Location::new(2, 3), Location::new(3, 7)); - /// // Union of the two is the min/max of the two spans - /// // line 1, column 1 -> line 3, column 7 - /// let union = span1.union(&span2); - /// assert_eq!(union, Span::new(Location::new(1, 1), Location::new(3, 7))); - /// ``` - pub fn union(&self, other: &Span) -> Span { - // If either span is empty, return the other - // this prevents propagating (0, 0) through the tree - match (self, other) { - (&Span::EMPTY, _) => *other, - (_, &Span::EMPTY) => *self, - _ => Span { - start: cmp::min(self.start, other.start), - end: cmp::max(self.end, other.end), - }, - } - } - - /// Same as [Span::union] for `Option` - /// - /// If `other` is `None`, `self` is returned - pub fn union_opt(&self, other: &Option) -> Span { - match other { - Some(other) => self.union(other), - None => *self, - } - } - - /// Return the [Span::union] of all spans in the iterator - /// - /// If the iterator is empty, an empty span is returned - /// - /// # Example - /// ``` - /// # use sqlparser::tokenizer::{Span, Location}; - /// let spans = vec![ - /// Span::new(Location::new(1, 1), Location::new(2, 5)), - /// Span::new(Location::new(2, 3), Location::new(3, 7)), - /// Span::new(Location::new(3, 1), Location::new(4, 2)), - /// ]; - /// // line 1, column 1 -> line 4, column 2 - /// assert_eq!( - /// Span::union_iter(spans), - /// Span::new(Location::new(1, 1), Location::new(4, 2)) - /// ); - pub fn union_iter>(iter: I) -> Span { - iter.into_iter() - .reduce(|acc, item| acc.union(&item)) - .unwrap_or(Span::empty()) - } -} - -/// Backwards compatibility struct for [`TokenWithSpan`] -#[deprecated(since = "0.53.0", note = "please use `TokenWithSpan` instead")] -pub type TokenWithLocation = TokenWithSpan; - -/// A [Token] with [Span] attached to it -/// -/// This is used to track the location of a token in the input string -/// -/// # Examples -/// ``` -/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan}; -/// // commas @ line 1, column 10 -/// let tok1 = TokenWithSpan::new( -/// Token::Comma, -/// Span::new(Location::new(1, 10), Location::new(1, 11)), -/// ); -/// assert_eq!(tok1, Token::Comma); // can compare the token -/// -/// // commas @ line 2, column 20 -/// let tok2 = TokenWithSpan::new( -/// Token::Comma, -/// Span::new(Location::new(2, 20), Location::new(2, 21)), -/// ); -/// // same token but different locations are not equal -/// assert_ne!(tok1, tok2); -/// ``` -#[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TokenWithSpan { +/// A [Token] with [Location] attached to it +#[derive(Debug, Eq, PartialEq, Clone)] +pub struct TokenWithLocation { pub token: Token, - pub span: Span, + pub location: Location, } -impl TokenWithSpan { - /// Create a new [`TokenWithSpan`] from a [`Token`] and a [`Span`] - pub fn new(token: Token, span: Span) -> Self { - Self { token, span } +impl 
TokenWithLocation { + pub fn new(token: Token, line: u64, column: u64) -> TokenWithLocation { + TokenWithLocation { + token, + location: Location { line, column }, + } } - /// Wrap a token with an empty span - pub fn wrap(token: Token) -> Self { - Self::new(token, Span::empty()) - } - - /// Wrap a token with a location from `start` to `end` - pub fn at(token: Token, start: Location, end: Location) -> Self { - Self::new(token, Span::new(start, end)) - } - - /// Return an EOF token with no location - pub fn new_eof() -> Self { - Self::wrap(Token::EOF) + pub fn wrap(token: Token) -> TokenWithLocation { + TokenWithLocation::new(token, 0, 0) } } -impl PartialEq for TokenWithSpan { +impl PartialEq for TokenWithLocation { fn eq(&self, other: &Token) -> bool { &self.token == other } } -impl PartialEq for Token { - fn eq(&self, other: &TokenWithSpan) -> bool { +impl PartialEq for Token { + fn eq(&self, other: &TokenWithLocation) -> bool { self == &other.token } } -impl fmt::Display for TokenWithSpan { +impl fmt::Display for TokenWithLocation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.token.fmt(f) } @@ -747,7 +504,7 @@ struct State<'a> { pub col: u64, } -impl State<'_> { +impl<'a> State<'a> { /// return the next character and advance the stream pub fn next(&mut self) -> Option { match self.peekable.next() { @@ -879,8 +636,8 @@ impl<'a> Tokenizer<'a> { } /// Tokenize the statement and produce a vector of tokens with location information - pub fn tokenize_with_location(&mut self) -> Result, TokenizerError> { - let mut tokens: Vec = vec![]; + pub fn tokenize_with_location(&mut self) -> Result, TokenizerError> { + let mut tokens: Vec = vec![]; self.tokenize_with_location_into_buf(&mut tokens) .map(|_| tokens) } @@ -889,7 +646,7 @@ impl<'a> Tokenizer<'a> { /// If an error is thrown, the buffer will contain all tokens that were successfully parsed before the error. pub fn tokenize_with_location_into_buf( &mut self, - buf: &mut Vec, + buf: &mut Vec, ) -> Result<(), TokenizerError> { let mut state = State { peekable: self.query.chars().peekable(), @@ -898,10 +655,8 @@ impl<'a> Tokenizer<'a> { }; let mut location = state.location(); - while let Some(token) = self.next_token(&mut state, buf.last().map(|t| &t.token))? { - let span = location.span_to(state.location()); - - buf.push(TokenWithSpan { token, span }); + while let Some(token) = self.next_token(&mut state)? { + buf.push(TokenWithLocation { token, location }); location = state.location(); } @@ -935,11 +690,7 @@ impl<'a> Tokenizer<'a> { } /// Get the next token or return None - fn next_token( - &self, - chars: &mut State, - prev_token: Option<&Token>, - ) -> Result, TokenizerError> { + fn next_token(&self, chars: &mut State) -> Result, TokenizerError> { match chars.peek() { Some(&ch) => match ch { ' ' => self.consume_and_return(chars, Token::Whitespace(Whitespace::Space)), @@ -953,9 +704,8 @@ impl<'a> Tokenizer<'a> { } Ok(Some(Token::Whitespace(Whitespace::Newline))) } - // BigQuery and MySQL use b or B for byte string literal, Postgres for bit strings - b @ 'B' | b @ 'b' if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | MySqlDialect | GenericDialect) => - { + // BigQuery uses b or B for byte string literal + b @ 'B' | b @ 'b' if dialect_of!(self is BigQueryDialect | GenericDialect) => { chars.next(); // consume match chars.peek() { Some('\'') => { @@ -1026,10 +776,7 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some('\'') => { // N'...' 
- a - let backslash_escape = - self.dialect.supports_string_literal_backslash_escape(); - let s = - self.tokenize_single_quoted_string(chars, '\'', backslash_escape)?; + let s = self.tokenize_single_quoted_string(chars, '\'', true)?; Ok(Some(Token::NationalStringLiteral(s))) } _ => { @@ -1040,7 +787,7 @@ impl<'a> Tokenizer<'a> { } } // PostgreSQL accepts "escape" string constants, which are an extension to the SQL standard. - x @ 'e' | x @ 'E' if self.dialect.supports_string_escape_constant() => { + x @ 'e' | x @ 'E' => { let starting_loc = chars.location(); chars.next(); // consume, to check the next char match chars.peek() { @@ -1133,98 +880,34 @@ impl<'a> Tokenizer<'a> { Ok(Some(Token::DoubleQuotedString(s))) } // delimited (quoted) identifier - quote_start if self.dialect.is_delimited_identifier_start(ch) => { - let word = self.tokenize_quoted_identifier(quote_start, chars)?; - Ok(Some(Token::make_word(&word, Some(quote_start)))) - } - // Potentially nested delimited (quoted) identifier quote_start - if self - .dialect - .is_nested_delimited_identifier_start(quote_start) + if self.dialect.is_delimited_identifier_start(ch) && self .dialect - .peek_nested_delimited_identifier_quotes(chars.peekable.clone()) - .is_some() => + .is_proper_identifier_inside_quotes(chars.peekable.clone()) => { - let Some((quote_start, nested_quote_start)) = self - .dialect - .peek_nested_delimited_identifier_quotes(chars.peekable.clone()) - else { - return self.tokenizer_error( - chars.location(), - format!("Expected nested delimiter '{quote_start}' before EOF."), - ); - }; - - let Some(nested_quote_start) = nested_quote_start else { - let word = self.tokenize_quoted_identifier(quote_start, chars)?; - return Ok(Some(Token::make_word(&word, Some(quote_start)))); - }; - - let mut word = vec![]; - let quote_end = Word::matching_end_quote(quote_start); - let nested_quote_end = Word::matching_end_quote(nested_quote_start); let error_loc = chars.location(); + chars.next(); // consume the opening quote + let quote_end = Word::matching_end_quote(quote_start); + let (s, last_char) = self.parse_quoted_ident(chars, quote_end); - chars.next(); // skip the first delimiter - peeking_take_while(chars, |ch| ch.is_whitespace()); - if chars.peek() != Some(&nested_quote_start) { - return self.tokenizer_error( - error_loc, - format!("Expected nested delimiter '{nested_quote_start}' before EOF."), - ); - } - word.push(nested_quote_start.into()); - word.push(self.tokenize_quoted_identifier(nested_quote_end, chars)?); - word.push(nested_quote_end.into()); - peeking_take_while(chars, |ch| ch.is_whitespace()); - if chars.peek() != Some("e_end) { - return self.tokenizer_error( + if last_char == Some(quote_end) { + Ok(Some(Token::make_word(&s, Some(quote_start)))) + } else { + self.tokenizer_error( error_loc, format!("Expected close delimiter '{quote_end}' before EOF."), - ); + ) } - chars.next(); // skip close delimiter - - Ok(Some(Token::make_word(&word.concat(), Some(quote_start)))) } // numbers and period '0'..='9' | '.' => { - // special case where if ._ is encountered after a word then that word - // is a table and the _ is the start of the col name. - // if the prev token is not a word, then this is not a valid sql - // word or number. - if ch == '.' 
&& chars.peekable.clone().nth(1) == Some('_') { - if let Some(Token::Word(_)) = prev_token { - chars.next(); - return Ok(Some(Token::Period)); - } - - return self.tokenizer_error( - chars.location(), - "Unexpected character '_'".to_string(), - ); - } - - // Some dialects support underscore as number separator - // There can only be one at a time and it must be followed by another digit - let is_number_separator = |ch: char, next_char: Option| { - self.dialect.supports_numeric_literal_underscores() - && ch == '_' - && next_char.is_some_and(|next_ch| next_ch.is_ascii_hexdigit()) - }; - - let mut s = peeking_next_take_while(chars, |ch, next_ch| { - ch.is_ascii_digit() || is_number_separator(ch, next_ch) - }); + let mut s = peeking_take_while(chars, |ch| ch.is_ascii_digit()); // match binary literal that starts with 0x if s == "0" && chars.peek() == Some(&'x') { chars.next(); - let s2 = peeking_next_take_while(chars, |ch, next_ch| { - ch.is_ascii_hexdigit() || is_number_separator(ch, next_ch) - }); + let s2 = peeking_take_while(chars, |ch| ch.is_ascii_hexdigit()); return Ok(Some(Token::HexStringLiteral(s2))); } @@ -1233,30 +916,15 @@ impl<'a> Tokenizer<'a> { s.push('.'); chars.next(); } + s += &peeking_take_while(chars, |ch| ch.is_ascii_digit()); - // If the dialect supports identifiers that start with a numeric prefix - // and we have now consumed a dot, check if the previous token was a Word. - // If so, what follows is definitely not part of a decimal number and - // we should yield the dot as a dedicated token so compound identifiers - // starting with digits can be parsed correctly. - if s == "." && self.dialect.supports_numeric_prefix() { - if let Some(Token::Word(_)) = prev_token { - return Ok(Some(Token::Period)); - } - } - - // Consume fractional digits. - s += &peeking_next_take_while(chars, |ch, next_ch| { - ch.is_ascii_digit() || is_number_separator(ch, next_ch) - }); - - // No fraction -> Token::Period + // No number -> Token::Period if s == "." { return Ok(Some(Token::Period)); } - // Parse exponent as number let mut exponent_part = String::new(); + // Parse exponent as number if chars.peek() == Some(&'e') || chars.peek() == Some(&'E') { let mut char_clone = chars.peekable.clone(); exponent_part.push(char_clone.next().unwrap()); @@ -1285,23 +953,14 @@ impl<'a> Tokenizer<'a> { } } - // If the dialect supports identifiers that start with a numeric prefix, - // we need to check if the value is in fact an identifier and must thus - // be tokenized as a word. - if self.dialect.supports_numeric_prefix() { - if exponent_part.is_empty() { - // If it is not a number with an exponent, it may be - // an identifier starting with digits. - let word = - peeking_take_while(chars, |ch| self.dialect.is_identifier_part(ch)); + // mysql dialect supports identifiers that start with a numeric prefix, + // as long as they aren't an exponent number. + if self.dialect.supports_numeric_prefix() && exponent_part.is_empty() { + let word = + peeking_take_while(chars, |ch| self.dialect.is_identifier_part(ch)); - if !word.is_empty() { - s += word.as_str(); - return Ok(Some(Token::make_word(s.as_str(), None))); - } - } else if prev_token == Some(&Token::Period) { - // If the previous token was a period, thus not belonging to a number, - // the value we have is part of an identifier. 
+ if !word.is_empty() { + s += word.as_str(); return Ok(Some(Token::make_word(s.as_str(), None))); } } @@ -1321,26 +980,14 @@ impl<'a> Tokenizer<'a> { // operators '-' => { chars.next(); // consume the '-' - match chars.peek() { Some('-') => { - let mut is_comment = true; - if self.dialect.requires_single_line_comment_whitespace() { - is_comment = Some(' ') == chars.peekable.clone().nth(1); - } - - if is_comment { - chars.next(); // consume second '-' - let comment = self.tokenize_single_line_comment(chars); - return Ok(Some(Token::Whitespace( - Whitespace::SingleLineComment { - prefix: "--".to_owned(), - comment, - }, - ))); - } - - self.start_binop(chars, "-", Token::Minus) + chars.next(); // consume the second '-', starting a single-line comment + let comment = self.tokenize_single_line_comment(chars); + Ok(Some(Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_owned(), + comment, + }))) } Some('>') => { chars.next(); @@ -1400,31 +1047,6 @@ impl<'a> Tokenizer<'a> { _ => self.start_binop(chars, "||", Token::StringConcat), } } - Some('&') if self.dialect.supports_geometric_types() => { - chars.next(); // consume - match chars.peek() { - Some('>') => self.consume_for_binop( - chars, - "|&>", - Token::VerticalBarAmpersandRightAngleBracket, - ), - _ => self.start_binop_opt(chars, "|&", None), - } - } - Some('>') if self.dialect.supports_geometric_types() => { - chars.next(); // consume - match chars.peek() { - Some('>') => self.consume_for_binop( - chars, - "|>>", - Token::VerticalBarShiftRight, - ), - _ => self.start_binop_opt(chars, "|>", None), - } - } - Some('>') if self.dialect.supports_pipe_operator() => { - self.consume_for_binop(chars, "|>", Token::VerticalBarRightAngleBracket) - } // Bitshift '|' operator _ => self.start_binop(chars, "|", Token::Pipe), } @@ -1473,34 +1095,8 @@ impl<'a> Tokenizer<'a> { _ => self.start_binop(chars, "<=", Token::LtEq), } } - Some('|') if self.dialect.supports_geometric_types() => { - self.consume_for_binop(chars, "<<|", Token::ShiftLeftVerticalBar) - } Some('>') => self.consume_for_binop(chars, "<>", Token::Neq), - Some('<') if self.dialect.supports_geometric_types() => { - chars.next(); // consume - match chars.peek() { - Some('|') => self.consume_for_binop( - chars, - "<<|", - Token::ShiftLeftVerticalBar, - ), - _ => self.start_binop(chars, "<<", Token::ShiftLeft), - } - } Some('<') => self.consume_for_binop(chars, "<<", Token::ShiftLeft), - Some('-') if self.dialect.supports_geometric_types() => { - chars.next(); // consume - match chars.peek() { - Some('>') => { - self.consume_for_binop(chars, "<->", Token::TwoWayArrow) - } - _ => self.start_binop_opt(chars, "<-", None), - } - } - Some('^') if self.dialect.supports_geometric_types() => { - self.consume_for_binop(chars, "<^", Token::LeftAngleBracketCaret) - } Some('@') => self.consume_for_binop(chars, "<@", Token::ArrowAt), _ => self.start_binop(chars, "<", Token::Lt), } @@ -1510,9 +1106,6 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some('=') => self.consume_for_binop(chars, ">=", Token::GtEq), Some('>') => self.consume_for_binop(chars, ">>", Token::ShiftRight), - Some('^') if self.dialect.supports_geometric_types() => { - self.consume_for_binop(chars, ">^", Token::RightAngleBracketCaret) - } _ => self.start_binop(chars, ">", Token::Gt), } } @@ -1531,22 +1124,6 @@ impl<'a> Tokenizer<'a> { '&' => { chars.next(); // consume the '&' match chars.peek() { - Some('>') if self.dialect.supports_geometric_types() => { - chars.next(); - self.consume_and_return(chars, 
Token::AmpersandRightAngleBracket) - } - Some('<') if self.dialect.supports_geometric_types() => { - chars.next(); // consume - match chars.peek() { - Some('|') => self.consume_and_return( - chars, - Token::AmpersandLeftAngleBracketVerticalBar, - ), - _ => { - self.start_binop(chars, "&<", Token::AmpersandLeftAngleBracket) - } - } - } Some('&') => { chars.next(); // consume the second '&' self.start_binop(chars, "&&", Token::Overlap) @@ -1564,8 +1141,7 @@ impl<'a> Tokenizer<'a> { } '{' => self.consume_and_return(chars, Token::LBrace), '}' => self.consume_and_return(chars, Token::RBrace), - '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect | HiveDialect) => - { + '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect) => { chars.next(); // consume the '#', starting a snowflake single-line comment let comment = self.tokenize_single_line_comment(chars); Ok(Some(Token::Whitespace(Whitespace::SingleLineComment { @@ -1577,9 +1153,6 @@ impl<'a> Tokenizer<'a> { chars.next(); // consume match chars.peek() { Some('*') => self.consume_for_binop(chars, "~*", Token::TildeAsterisk), - Some('=') if self.dialect.supports_geometric_types() => { - self.consume_for_binop(chars, "~=", Token::TildeEqual) - } Some('~') => { chars.next(); match chars.peek() { @@ -1606,9 +1179,6 @@ impl<'a> Tokenizer<'a> { } } Some(' ') => Ok(Some(Token::Sharp)), - Some('#') if self.dialect.supports_geometric_types() => { - self.consume_for_binop(chars, "##", Token::DoubleSharp) - } Some(sch) if self.dialect.is_identifier_start('#') => { self.tokenize_identifier_or_keyword([ch, *sch], chars) } @@ -1618,16 +1188,6 @@ impl<'a> Tokenizer<'a> { '@' => { chars.next(); match chars.peek() { - Some('@') if self.dialect.supports_geometric_types() => { - self.consume_and_return(chars, Token::AtAt) - } - Some('-') if self.dialect.supports_geometric_types() => { - chars.next(); - match chars.peek() { - Some('@') => self.consume_and_return(chars, Token::AtDashAt), - _ => self.start_binop_opt(chars, "@-", None), - } - } Some('>') => self.consume_and_return(chars, Token::AtArrow), Some('?') => self.consume_and_return(chars, Token::AtQuestion), Some('@') => { @@ -1641,18 +1201,6 @@ impl<'a> Tokenizer<'a> { } } Some(' ') => Ok(Some(Token::AtSign)), - // We break on quotes here, because no dialect allows identifiers starting - // with @ and containing quotation marks (e.g. `@'foo'`) unless they are - // quoted, which is tokenized as a quoted string, not here (e.g. - // `"@'foo'"`). Further, at least two dialects parse `@` followed by a - // quoted string as two separate tokens, which this allows. For example, - // Postgres parses `@'1'` as the absolute value of '1' which is implicitly - // cast to a numeric type. And when parsing MySQL-style grantees (e.g. - // `GRANT ALL ON *.* to 'root'@'localhost'`), we also want separate tokens - // for the user, the `@`, and the host. - Some('\'') => Ok(Some(Token::AtSign)), - Some('\"') => Ok(Some(Token::AtSign)), - Some('`') => Ok(Some(Token::AtSign)), Some(sch) if self.dialect.is_identifier_start('@') => { self.tokenize_identifier_or_keyword([ch, *sch], chars) } @@ -1660,30 +1208,11 @@ impl<'a> Tokenizer<'a> { } } // Postgres uses ? for jsonb operators, not prepared statements - '?' if self.dialect.supports_geometric_types() => { - chars.next(); // consume + '?' 
if dialect_of!(self is PostgreSqlDialect) => { + chars.next(); match chars.peek() { - Some('|') => { - chars.next(); - match chars.peek() { - Some('|') => self.consume_and_return( - chars, - Token::QuestionMarkDoubleVerticalBar, - ), - _ => Ok(Some(Token::QuestionPipe)), - } - } - + Some('|') => self.consume_and_return(chars, Token::QuestionPipe), Some('&') => self.consume_and_return(chars, Token::QuestionAnd), - Some('-') => { - chars.next(); // consume - match chars.peek() { - Some('|') => self - .consume_and_return(chars, Token::QuestionMarkDashVerticalBar), - _ => Ok(Some(Token::QuestionMarkDash)), - } - } - Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp), _ => self.consume_and_return(chars, Token::Question), } } @@ -1699,7 +1228,7 @@ impl<'a> Tokenizer<'a> { } '$' => Ok(Some(self.tokenize_dollar_preceded_value(chars)?)), - // whitespace check (including unicode chars) should be last as it covers some of the chars above + //whitespace check (including unicode chars) should be last as it covers some of the chars above ch if ch.is_whitespace() => { self.consume_and_return(chars, Token::Whitespace(Whitespace::Space)) } @@ -1717,7 +1246,7 @@ impl<'a> Tokenizer<'a> { default: Token, ) -> Result, TokenizerError> { chars.next(); // consume the first char - self.start_binop_opt(chars, prefix, Some(default)) + self.start_binop(chars, prefix, default) } /// parse a custom binary operator @@ -1726,16 +1255,6 @@ impl<'a> Tokenizer<'a> { chars: &mut State, prefix: &str, default: Token, - ) -> Result, TokenizerError> { - self.start_binop_opt(chars, prefix, Some(default)) - } - - /// parse a custom binary operator - fn start_binop_opt( - &self, - chars: &mut State, - prefix: &str, - default: Option, ) -> Result, TokenizerError> { let mut custom = None; while let Some(&ch) = chars.peek() { @@ -1746,14 +1265,10 @@ impl<'a> Tokenizer<'a> { custom.get_or_insert_with(|| prefix.to_string()).push(ch); chars.next(); } - match (custom, default) { - (Some(custom), _) => Ok(Token::CustomBinaryOperator(custom).into()), - (None, Some(tok)) => Ok(Some(tok)), - (None, None) => self.tokenizer_error( - chars.location(), - format!("Expected a valid binary operator after '{prefix}'"), - ), - } + + Ok(Some( + custom.map(Token::CustomBinaryOperator).unwrap_or(default), + )) } /// Tokenize dollar preceded value (i.e: a string/placeholder) @@ -1763,8 +1278,7 @@ impl<'a> Tokenizer<'a> { chars.next(); - // If the dialect does not support dollar-quoted strings, then `$$` is rather a placeholder. - if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { + if let Some('$') = chars.peek() { chars.next(); let mut is_terminated = false; @@ -1798,43 +1312,52 @@ impl<'a> Tokenizer<'a> { }; } else { value.push_str(&peeking_take_while(chars, |ch| { - ch.is_alphanumeric() - || ch == '_' - // Allow $ as a placeholder character if the dialect supports it - || matches!(ch, '$' if self.dialect.supports_dollar_placeholder()) + ch.is_alphanumeric() || ch == '_' })); - // If the dialect does not support dollar-quoted strings, don't look for the end delimiter. 
- if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { + if let Some('$') = chars.peek() { chars.next(); - let mut temp = String::new(); - let end_delimiter = format!("${value}$"); - - loop { - match chars.next() { - Some(ch) => { - temp.push(ch); - - if temp.ends_with(&end_delimiter) { - if let Some(temp) = temp.strip_suffix(&end_delimiter) { - s.push_str(temp); + 'searching_for_end: loop { + s.push_str(&peeking_take_while(chars, |ch| ch != '$')); + match chars.peek() { + Some('$') => { + chars.next(); + let mut maybe_s = String::from("$"); + for c in value.chars() { + if let Some(next_char) = chars.next() { + maybe_s.push(next_char); + if next_char != c { + // This doesn't match the dollar quote delimiter so this + // is not the end of the string. + s.push_str(&maybe_s); + continue 'searching_for_end; + } + } else { + return self.tokenizer_error( + chars.location(), + "Unterminated dollar-quoted, expected $", + ); } - break; + } + if chars.peek() == Some(&'$') { + chars.next(); + maybe_s.push('$'); + // maybe_s matches the end delimiter + break 'searching_for_end; + } else { + // This also doesn't match the dollar quote delimiter as there are + // more characters before the second dollar so this is not the end + // of the string. + s.push_str(&maybe_s); + continue 'searching_for_end; } } - None => { - if temp.ends_with(&end_delimiter) { - if let Some(temp) = temp.strip_suffix(&end_delimiter) { - s.push_str(temp); - } - break; - } - + _ => { return self.tokenizer_error( chars.location(), "Unterminated dollar-quoted, expected $", - ); + ) } } } @@ -1862,17 +1385,11 @@ impl<'a> Tokenizer<'a> { // Consume characters until newline fn tokenize_single_line_comment(&self, chars: &mut State) -> String { - let mut comment = peeking_take_while(chars, |ch| match ch { - '\n' => false, // Always stop at \n - '\r' if dialect_of!(self is PostgreSqlDialect) => false, // Stop at \r for Postgres - _ => true, // Keep consuming for other characters - }); - + let mut comment = peeking_take_while(chars, |ch| ch != '\n'); if let Some(ch) = chars.next() { - assert!(ch == '\n' || ch == '\r'); + assert_eq!(ch, '\n'); comment.push(ch); } - comment } @@ -1885,27 +1402,6 @@ impl<'a> Tokenizer<'a> { s } - /// Read a quoted identifier - fn tokenize_quoted_identifier( - &self, - quote_start: char, - chars: &mut State, - ) -> Result { - let error_loc = chars.location(); - chars.next(); // consume the opening quote - let quote_end = Word::matching_end_quote(quote_start); - let (s, last_char) = self.parse_quoted_ident(chars, quote_end); - - if last_char == Some(quote_end) { - Ok(s) - } else { - self.tokenizer_error( - error_loc, - format!("Expected close delimiter '{quote_end}' before EOF."), - ) - } - } - /// Read a single quoted string, starting with the opening quote. fn tokenize_escaped_single_quoted_string( &self, @@ -2058,13 +1554,8 @@ impl<'a> Tokenizer<'a> { num_consecutive_quotes = 0; if let Some(next) = chars.peek() { - if !self.unescape - || (self.dialect.ignores_wildcard_escapes() - && (*next == '%' || *next == '_')) - { - // In no-escape mode, the given query has to be saved completely - // including backslashes. Similarly, with ignore_like_wildcard_escapes, - // the backslash is not stripped. + if !self.unescape { + // In no-escape mode, the given query has to be saved completely including backslashes. 
s.push(ch); s.push(*next); chars.next(); // consume next @@ -2107,33 +1598,28 @@ impl<'a> Tokenizer<'a> { ) -> Result, TokenizerError> { let mut s = String::new(); let mut nested = 1; - let supports_nested_comments = self.dialect.supports_nested_comments(); + let mut last_ch = ' '; loop { match chars.next() { - Some('/') if matches!(chars.peek(), Some('*')) && supports_nested_comments => { - chars.next(); // consume the '*' - s.push('/'); - s.push('*'); - nested += 1; - } - Some('*') if matches!(chars.peek(), Some('/')) => { - chars.next(); // consume the '/' - nested -= 1; - if nested == 0 { - break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s)))); - } - s.push('*'); - s.push('/'); - } Some(ch) => { + if last_ch == '/' && ch == '*' { + nested += 1; + } else if last_ch == '*' && ch == '/' { + nested -= 1; + if nested == 0 { + s.pop(); + break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s)))); + } + } s.push(ch); + last_ch = ch; } None => { break self.tokenizer_error( chars.location(), "Unexpected EOF while in a multi-line comment", - ); + ) } } } @@ -2189,24 +1675,6 @@ fn peeking_take_while(chars: &mut State, mut predicate: impl FnMut(char) -> bool s } -/// Same as peeking_take_while, but also passes the next character to the predicate. -fn peeking_next_take_while( - chars: &mut State, - mut predicate: impl FnMut(char, Option) -> bool, -) -> String { - let mut s = String::new(); - while let Some(&ch) = chars.peek() { - let next_char = chars.peekable.clone().nth(1); - if predicate(ch, next_char) { - chars.next(); // consume - s.push(ch); - } else { - break; - } - } - s -} - fn unescape_single_quoted_string(chars: &mut State<'_>) -> Option { Unescape::new(chars).unescape() } @@ -2402,13 +1870,13 @@ fn take_char_from_hex_digits( location: chars.location(), })?; let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError { - message: format!("Invalid hex digit in escaped unicode string: {next_char}"), + message: format!("Invalid hex digit in escaped unicode string: {}", next_char), location: chars.location(), })?; result = result * 16 + digit; } char::from_u32(result).ok_or_else(|| TokenizerError { - message: format!("Invalid unicode character: {result:x}"), + message: format!("Invalid unicode character: {:x}", result), location: chars.location(), }) } @@ -2417,9 +1885,8 @@ fn take_char_from_hex_digits( mod tests { use super::*; use crate::dialect::{ - BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, SQLiteDialect, + BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, }; - use crate::test_utils::all_dialects_where; use core::fmt::Debug; #[test] @@ -2488,41 +1955,6 @@ mod tests { compare(expected, tokens); } - #[test] - fn tokenize_numeric_literal_underscore() { - let dialect = GenericDialect {}; - let sql = String::from("SELECT 10_000"); - let mut tokenizer = Tokenizer::new(&dialect, &sql); - let tokens = tokenizer.tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Number("10".to_string(), false), - Token::make_word("_000", None), - ]; - compare(expected, tokens); - - all_dialects_where(|dialect| dialect.supports_numeric_literal_underscores()).tokenizes_to( - "SELECT 10_000, _10_000, 10_00_, 10___0", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Number("10_000".to_string(), false), - Token::Comma, - Token::Whitespace(Whitespace::Space), - Token::make_word("_10_000", None), // leading underscore 
tokenizes as a word (parsed as column identifier) - Token::Comma, - Token::Whitespace(Whitespace::Space), - Token::Number("10_00".to_string(), false), - Token::make_word("_", None), // trailing underscores tokenizes as a word (syntax error in some dialects) - Token::Comma, - Token::Whitespace(Whitespace::Space), - Token::Number("10".to_string(), false), - Token::make_word("___0", None), // multiple underscores tokenizes as a word (syntax error in some dialects) - ], - ); - } - #[test] fn tokenize_select_exponent() { let sql = String::from("SELECT 1e10, 1e-10, 1e+10, 1ea, 1e-10a, 1e-10-10"); @@ -2857,67 +2289,20 @@ mod tests { #[test] fn tokenize_dollar_quoted_string_tagged() { - let test_cases = vec![ - ( - String::from("SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$"), - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "dollar '$' quoted strings have $tags like this$ or like this $$".into(), - tag: Some("tag".into()), - }) - ] - ), - ( - String::from("SELECT $abc$x$ab$abc$"), - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "x$ab".into(), - tag: Some("abc".into()), - }) - ] - ), - ( - String::from("SELECT $abc$$abc$"), - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "".into(), - tag: Some("abc".into()), - }) - ] - ), - ( - String::from("0$abc$$abc$1"), - vec![ - Token::Number("0".into(), false), - Token::DollarQuotedString(DollarQuotedString { - value: "".into(), - tag: Some("abc".into()), - }), - Token::Number("1".into(), false), - ] - ), - ( - String::from("$function$abc$q$data$q$$function$"), - vec![ - Token::DollarQuotedString(DollarQuotedString { - value: "abc$q$data$q$".into(), - tag: Some("function".into()), - }), - ] - ), - ]; - + let sql = String::from( + "SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$", + ); let dialect = GenericDialect {}; - for (sql, expected) in test_cases { - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - compare(expected, tokens); - } + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "dollar '$' quoted strings have $tags like this$ or like this $$".into(), + tag: Some("tag".into()), + }), + ]; + compare(expected, tokens); } #[test] @@ -2936,78 +2321,6 @@ mod tests { ); } - #[test] - fn tokenize_dollar_quoted_string_tagged_unterminated_mirror() { - let sql = String::from("SELECT $abc$abc$"); - let dialect = GenericDialect {}; - assert_eq!( - Tokenizer::new(&dialect, &sql).tokenize(), - Err(TokenizerError { - message: "Unterminated dollar-quoted, expected $".into(), - location: Location { - line: 1, - column: 17 - } - }) - ); - } - - #[test] - fn tokenize_dollar_placeholder() { - let sql = String::from("SELECT $$, $$ABC$$, $ABC$, $ABC"); - let dialect = SQLiteDialect {}; - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - assert_eq!( - tokens, - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Placeholder("$$".into()), - Token::Comma, - Token::Whitespace(Whitespace::Space), - Token::Placeholder("$$ABC$$".into()), - Token::Comma, - Token::Whitespace(Whitespace::Space), - 
Token::Placeholder("$ABC$".into()), - Token::Comma, - Token::Whitespace(Whitespace::Space), - Token::Placeholder("$ABC".into()), - ] - ); - } - - #[test] - fn tokenize_nested_dollar_quoted_strings() { - let sql = String::from("SELECT $tag$dollar $nested$ string$tag$"); - let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "dollar $nested$ string".into(), - tag: Some("tag".into()), - }), - ]; - compare(expected, tokens); - } - - #[test] - fn tokenize_dollar_quoted_string_untagged_empty() { - let sql = String::from("SELECT $$$$"); - let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "".into(), - tag: None, - }), - ]; - compare(expected, tokens); - } - #[test] fn tokenize_dollar_quoted_string_untagged() { let sql = @@ -3078,62 +2391,17 @@ mod tests { #[test] fn tokenize_comment() { - let test_cases = vec![ - ( - String::from("0--this is a comment\n1"), - vec![ - Token::Number("0".to_string(), false), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: "this is a comment\n".to_string(), - }), - Token::Number("1".to_string(), false), - ], - ), - ( - String::from("0--this is a comment\r1"), - vec![ - Token::Number("0".to_string(), false), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: "this is a comment\r1".to_string(), - }), - ], - ), - ( - String::from("0--this is a comment\r\n1"), - vec![ - Token::Number("0".to_string(), false), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: "this is a comment\r\n".to_string(), - }), - Token::Number("1".to_string(), false), - ], - ), - ]; + let sql = String::from("0--this is a comment\n1"); let dialect = GenericDialect {}; - - for (sql, expected) in test_cases { - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - compare(expected, tokens); - } - } - - #[test] - fn tokenize_comment_postgres() { - let sql = String::from("1--\r0"); - - let dialect = PostgreSqlDialect {}; let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); let expected = vec![ - Token::Number("1".to_string(), false), + Token::Number("0".to_string(), false), Token::Whitespace(Whitespace::SingleLineComment { prefix: "--".to_string(), - comment: "\r".to_string(), + comment: "this is a comment\n".to_string(), }), - Token::Number("0".to_string(), false), + Token::Number("1".to_string(), false), ]; compare(expected, tokens); } @@ -3169,90 +2437,18 @@ mod tests { #[test] fn tokenize_nested_multiline_comment() { - let dialect = GenericDialect {}; - let test_cases = vec![ - ( - "0/*multi-line\n* \n/* comment \n /*comment*/*/ */ /comment*/1", - vec![ - Token::Number("0".to_string(), false), - Token::Whitespace(Whitespace::MultiLineComment( - "multi-line\n* \n/* comment \n /*comment*/*/ ".into(), - )), - Token::Whitespace(Whitespace::Space), - Token::Div, - Token::Word(Word { - value: "comment".to_string(), - quote_style: None, - keyword: Keyword::COMMENT, - }), - Token::Mul, - Token::Div, - Token::Number("1".to_string(), false), - ], - ), - ( - "0/*multi-line\n* \n/* comment \n /*comment/**/ */ /comment*/*/1", - vec![ - Token::Number("0".to_string(), 
false), - Token::Whitespace(Whitespace::MultiLineComment( - "multi-line\n* \n/* comment \n /*comment/**/ */ /comment*/".into(), - )), - Token::Number("1".to_string(), false), - ], - ), - ( - "SELECT 1/* a /* b */ c */0", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Number("1".to_string(), false), - Token::Whitespace(Whitespace::MultiLineComment(" a /* b */ c ".to_string())), - Token::Number("0".to_string(), false), - ], - ), - ]; - - for (sql, expected) in test_cases { - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - compare(expected, tokens); - } - } - - #[test] - fn tokenize_nested_multiline_comment_empty() { - let sql = "select 1/*/**/*/0"; + let sql = String::from("0/*multi-line\n* \n/* comment \n /*comment*/*/ */ /comment*/1"); let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); let expected = vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::Number("1".to_string(), false), - Token::Whitespace(Whitespace::MultiLineComment("/**/".to_string())), Token::Number("0".to_string(), false), - ]; - - compare(expected, tokens); - } - - #[test] - fn tokenize_nested_comments_if_not_supported() { - let dialect = SQLiteDialect {}; - let sql = "SELECT 1/*/* nested comment */*/0"; - let tokens = Tokenizer::new(&dialect, sql).tokenize(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Number("1".to_string(), false), Token::Whitespace(Whitespace::MultiLineComment( - "/* nested comment ".to_string(), + "multi-line\n* \n/* comment \n /*comment*/*/ */ /comment".to_string(), )), - Token::Mul, - Token::Div, - Token::Number("0".to_string(), false), + Token::Number("1".to_string(), false), ]; - - compare(expected, tokens.unwrap()); + compare(expected, tokens); } #[test] @@ -3472,30 +2668,18 @@ mod tests { .tokenize_with_location() .unwrap(); let expected = vec![ - TokenWithSpan::at(Token::make_keyword("SELECT"), (1, 1).into(), (1, 7).into()), - TokenWithSpan::at( - Token::Whitespace(Whitespace::Space), - (1, 7).into(), - (1, 8).into(), - ), - TokenWithSpan::at(Token::make_word("a", None), (1, 8).into(), (1, 9).into()), - TokenWithSpan::at(Token::Comma, (1, 9).into(), (1, 10).into()), - TokenWithSpan::at( - Token::Whitespace(Whitespace::Newline), - (1, 10).into(), - (2, 1).into(), - ), - TokenWithSpan::at( - Token::Whitespace(Whitespace::Space), - (2, 1).into(), - (2, 2).into(), - ), - TokenWithSpan::at(Token::make_word("b", None), (2, 2).into(), (2, 3).into()), + TokenWithLocation::new(Token::make_keyword("SELECT"), 1, 1), + TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 1, 7), + TokenWithLocation::new(Token::make_word("a", None), 1, 8), + TokenWithLocation::new(Token::Comma, 1, 9), + TokenWithLocation::new(Token::Whitespace(Whitespace::Newline), 1, 10), + TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 2, 1), + TokenWithLocation::new(Token::make_word("b", None), 2, 2), ]; compare(expected, tokens); } - fn compare(expected: Vec, actual: Vec) { + fn compare(expected: Vec, actual: Vec) { //println!("------------------------------"); //println!("tokens = {:?}", actual); //println!("expected = {:?}", expected); @@ -3504,7 +2688,7 @@ mod tests { } fn check_unescape(s: &str, expected: Option<&str>) { - let s = format!("'{s}'"); + let s = format!("'{}'", s); let mut state = State { peekable: s.chars().peekable(), line: 0, 
@@ -3637,9 +2821,6 @@ mod tests { (r#"'\\a\\b\'c'"#, r#"\\a\\b\'c"#, r#"\a\b'c"#), (r#"'\'abcd'"#, r#"\'abcd"#, r#"'abcd"#), (r#"'''a''b'"#, r#"''a''b"#, r#"'a'b"#), - (r#"'\q'"#, r#"\q"#, r#"q"#), - (r#"'\%\_'"#, r#"\%\_"#, r#"%_"#), - (r#"'\\%\\_'"#, r#"\\%\\_"#, r#"\%\_"#), ] { let tokens = Tokenizer::new(&dialect, sql) .with_unescape(false) @@ -3673,16 +2854,6 @@ mod tests { compare(expected, tokens); } - - // MySQL special case for LIKE escapes - for (sql, expected) in [(r#"'\%'"#, r#"\%"#), (r#"'\_'"#, r#"\_"#)] { - let dialect = MySqlDialect {}; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - - let expected = vec![Token::SingleQuotedString(expected.to_string())]; - - compare(expected, tokens); - } } #[test] @@ -3804,270 +2975,4 @@ mod tests { let expected = vec![Token::SingleQuotedString("''".to_string())]; compare(expected, tokens); } - - #[test] - fn test_mysql_users_grantees() { - let dialect = MySqlDialect {}; - - let sql = "CREATE USER `root`@`%`"; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("CREATE"), - Token::Whitespace(Whitespace::Space), - Token::make_keyword("USER"), - Token::Whitespace(Whitespace::Space), - Token::make_word("root", Some('`')), - Token::AtSign, - Token::make_word("%", Some('`')), - ]; - compare(expected, tokens); - } - - #[test] - fn test_postgres_abs_without_space_and_string_literal() { - let dialect = MySqlDialect {}; - - let sql = "SELECT @'1'"; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::AtSign, - Token::SingleQuotedString("1".to_string()), - ]; - compare(expected, tokens); - } - - #[test] - fn test_postgres_abs_without_space_and_quoted_column() { - let dialect = MySqlDialect {}; - - let sql = r#"SELECT @"bar" FROM foo"#; - let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::AtSign, - Token::DoubleQuotedString("bar".to_string()), - Token::Whitespace(Whitespace::Space), - Token::make_keyword("FROM"), - Token::Whitespace(Whitespace::Space), - Token::make_word("foo", None), - ]; - compare(expected, tokens); - } - - #[test] - fn test_national_strings_backslash_escape_not_supported() { - all_dialects_where(|dialect| !dialect.supports_string_literal_backslash_escape()) - .tokenizes_to( - "select n'''''\\'", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::NationalStringLiteral("''\\".to_string()), - ], - ); - } - - #[test] - fn test_national_strings_backslash_escape_supported() { - all_dialects_where(|dialect| dialect.supports_string_literal_backslash_escape()) - .tokenizes_to( - "select n'''''\\''", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::NationalStringLiteral("'''".to_string()), - ], - ); - } - - #[test] - fn test_string_escape_constant_not_supported() { - all_dialects_where(|dialect| !dialect.supports_string_escape_constant()).tokenizes_to( - "select e'...'", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::make_word("e", None), - Token::SingleQuotedString("...".to_string()), - ], - ); - - all_dialects_where(|dialect| !dialect.supports_string_escape_constant()).tokenizes_to( - "select E'...'", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::make_word("E", 
None), - Token::SingleQuotedString("...".to_string()), - ], - ); - } - - #[test] - fn test_string_escape_constant_supported() { - all_dialects_where(|dialect| dialect.supports_string_escape_constant()).tokenizes_to( - "select e'\\''", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::EscapedStringLiteral("'".to_string()), - ], - ); - - all_dialects_where(|dialect| dialect.supports_string_escape_constant()).tokenizes_to( - "select E'\\''", - vec![ - Token::make_keyword("select"), - Token::Whitespace(Whitespace::Space), - Token::EscapedStringLiteral("'".to_string()), - ], - ); - } - - #[test] - fn test_whitespace_required_after_single_line_comment() { - all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT --'abc'", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Minus, - Token::Minus, - Token::SingleQuotedString("abc".to_string()), - ], - ); - - all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT -- 'abc'", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: " 'abc'".to_string(), - }), - ], - ); - - all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT --", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Minus, - Token::Minus, - ], - ); - } - - #[test] - fn test_whitespace_not_required_after_single_line_comment() { - all_dialects_where(|dialect| !dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT --'abc'", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: "'abc'".to_string(), - }), - ], - ); - - all_dialects_where(|dialect| !dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT -- 'abc'", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: " 'abc'".to_string(), - }), - ], - ); - - all_dialects_where(|dialect| !dialect.requires_single_line_comment_whitespace()) - .tokenizes_to( - "SELECT --", - vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_string(), - comment: "".to_string(), - }), - ], - ); - } - - #[test] - fn test_tokenize_identifiers_numeric_prefix() { - all_dialects_where(|dialect| dialect.supports_numeric_prefix()) - .tokenizes_to("123abc", vec![Token::make_word("123abc", None)]); - - all_dialects_where(|dialect| dialect.supports_numeric_prefix()) - .tokenizes_to("12e34", vec![Token::Number("12e34".to_string(), false)]); - - all_dialects_where(|dialect| dialect.supports_numeric_prefix()).tokenizes_to( - "t.12e34", - vec![ - Token::make_word("t", None), - Token::Period, - Token::make_word("12e34", None), - ], - ); - - all_dialects_where(|dialect| dialect.supports_numeric_prefix()).tokenizes_to( - "t.1two3", - vec![ - Token::make_word("t", None), - Token::Period, - Token::make_word("1two3", None), - ], - ); - } - - #[test] - fn tokenize_period_underscore() { - let sql = String::from("SELECT table._col"); - // a dialect that supports underscores in numeric literals - let dialect = PostgreSqlDialect {}; 
- let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::Word(Word { - value: "table".to_string(), - quote_style: None, - keyword: Keyword::TABLE, - }), - Token::Period, - Token::Word(Word { - value: "_col".to_string(), - quote_style: None, - keyword: Keyword::NoKeyword, - }), - ]; - - compare(expected, tokens); - - let sql = String::from("SELECT ._123"); - if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() { - panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}"); - } - - let sql = String::from("SELECT ._abc"); - if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() { - panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}"); - } - } } diff --git a/tests/pretty_print.rs b/tests/pretty_print.rs deleted file mode 100644 index f5a9d861..00000000 --- a/tests/pretty_print.rs +++ /dev/null @@ -1,414 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -use sqlparser::dialect::GenericDialect; -use sqlparser::parser::Parser; - -fn prettify(sql: &str) -> String { - let ast = Parser::parse_sql(&GenericDialect {}, sql).unwrap(); - format!("{:#}", ast[0]) -} - -#[test] -fn test_pretty_print_select() { - assert_eq!( - prettify("SELECT a, b, c FROM my_table WHERE x = 1 AND y = 2"), - r#" -SELECT - a, - b, - c -FROM - my_table -WHERE - x = 1 AND y = 2 -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_join() { - assert_eq!( - prettify("SELECT a FROM table1 JOIN table2 ON table1.id = table2.id"), - r#" -SELECT - a -FROM - table1 - JOIN table2 ON table1.id = table2.id -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_subquery() { - assert_eq!( - prettify("SELECT * FROM (SELECT a, b FROM my_table) AS subquery"), - r#" -SELECT - * -FROM - ( - SELECT - a, - b - FROM - my_table - ) AS subquery -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_union() { - assert_eq!( - prettify("SELECT a FROM table1 UNION SELECT b FROM table2"), - r#" -SELECT - a -FROM - table1 -UNION -SELECT - b -FROM - table2 -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_group_by() { - assert_eq!( - prettify("SELECT a, COUNT(*) FROM my_table GROUP BY a HAVING COUNT(*) > 1"), - r#" -SELECT - a, - COUNT(*) -FROM - my_table -GROUP BY - a -HAVING - COUNT(*) > 1 -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_cte() { - assert_eq!( - prettify("WITH cte AS (SELECT a, b FROM my_table) SELECT * FROM cte"), - r#" -WITH cte AS ( - SELECT - a, - b - FROM - my_table -) -SELECT - * -FROM - cte -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_case_when() { - assert_eq!( - prettify("SELECT CASE WHEN x > 0 THEN 'positive' WHEN x < 0 THEN 'negative' ELSE 'zero' END FROM my_table"), - r#" -SELECT - CASE - WHEN x > 0 THEN - 'positive' - WHEN x < 0 THEN - 'negative' - ELSE - 'zero' - END -FROM - my_table -"#.trim() - ); -} - -#[test] -fn test_pretty_print_window_function() { - assert_eq!( - prettify("SELECT id, value, ROW_NUMBER() OVER (PARTITION BY category ORDER BY value DESC) as rank FROM my_table"), - r#" -SELECT - id, - value, - ROW_NUMBER() OVER ( - PARTITION BY category - ORDER BY value DESC - ) AS rank -FROM - my_table -"#.trim() - ); -} - -#[test] -fn test_pretty_print_multiline_string() { - assert_eq!( - prettify("SELECT 'multiline\nstring' AS str"), - r#" -SELECT - 'multiline -string' AS str -"# - .trim(), - "A literal string with a newline should be kept as is. The contents of the string should not be indented." 
- ); -} - -#[test] -fn test_pretty_print_insert_values() { - assert_eq!( - prettify("INSERT INTO my_table (a, b, c) VALUES (1, 2, 3), (4, 5, 6)"), - r#" -INSERT INTO my_table (a, b, c) -VALUES - (1, 2, 3), - (4, 5, 6) -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_insert_select() { - assert_eq!( - prettify("INSERT INTO my_table (a, b) SELECT x, y FROM source_table RETURNING a AS id"), - r#" -INSERT INTO my_table (a, b) -SELECT - x, - y -FROM - source_table -RETURNING - a AS id -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_update() { - assert_eq!( - prettify("UPDATE my_table SET a = 1, b = 2 WHERE x > 0 RETURNING id, name"), - r#" -UPDATE my_table -SET - a = 1, - b = 2 -WHERE - x > 0 -RETURNING - id, - name -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_delete() { - assert_eq!( - prettify("DELETE FROM my_table WHERE x > 0 RETURNING id, name"), - r#" -DELETE FROM - my_table -WHERE - x > 0 -RETURNING - id, - name -"# - .trim() - ); - - assert_eq!( - prettify("DELETE table1, table2"), - r#" -DELETE - table1, - table2 -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_create_table() { - assert_eq!( - prettify("CREATE TABLE my_table (id INT PRIMARY KEY, name VARCHAR(255) NOT NULL, CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id))"), - r#" -CREATE TABLE my_table ( - id INT PRIMARY KEY, - name VARCHAR(255) NOT NULL, - CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id) -) -"# - .trim() - ); -} - -#[test] -fn test_pretty_print_create_view() { - assert_eq!( - prettify("CREATE VIEW my_view AS SELECT a, b FROM my_table WHERE x > 0"), - r#" -CREATE VIEW my_view AS -SELECT - a, - b -FROM - my_table -WHERE - x > 0 -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_create_function() { - assert_eq!( - prettify("CREATE FUNCTION my_func() RETURNS INT BEGIN SELECT COUNT(*) INTO @count FROM my_table; RETURN @count; END"), - r#" -CREATE FUNCTION my_func() RETURNS INT -BEGIN - SELECT COUNT(*) INTO @count FROM my_table; - RETURN @count; -END -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_json_table() { - assert_eq!( - prettify("SELECT * FROM JSON_TABLE(@json, '$[*]' COLUMNS (id INT PATH '$.id', name VARCHAR(255) PATH '$.name')) AS jt"), - r#" -SELECT - * -FROM - JSON_TABLE( - @json, - '$[*]' COLUMNS ( - id INT PATH '$.id', - name VARCHAR(255) PATH '$.name' - ) - ) AS jt -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_transaction_blocks() { - assert_eq!( - prettify("BEGIN; UPDATE my_table SET x = 1; COMMIT;"), - r#" -BEGIN; -UPDATE my_table SET x = 1; -COMMIT; -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_control_flow() { - assert_eq!( - prettify("IF x > 0 THEN SELECT 'positive'; ELSE SELECT 'negative'; END IF;"), - r#" -IF x > 0 THEN - SELECT 'positive'; -ELSE - SELECT 'negative'; -END IF; -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_merge() { - assert_eq!( - prettify("MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN MATCHED THEN UPDATE SET t.value = s.value WHEN NOT MATCHED THEN INSERT (id, value) VALUES (s.id, s.value)"), - r#" -MERGE INTO target_table t -USING source_table s ON t.id = s.id -WHEN MATCHED THEN - UPDATE SET 
t.value = s.value -WHEN NOT MATCHED THEN - INSERT (id, value) VALUES (s.id, s.value) -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_create_index() { - assert_eq!( - prettify("CREATE INDEX idx_name ON my_table (column1, column2)"), - r#" -CREATE INDEX idx_name -ON my_table (column1, column2) -"# - .trim() - ); -} - -#[test] -#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"] -fn test_pretty_print_explain() { - assert_eq!( - prettify("EXPLAIN ANALYZE SELECT * FROM my_table WHERE x > 0"), - r#" -EXPLAIN ANALYZE -SELECT - * -FROM - my_table -WHERE - x > 0 -"# - .trim() - ); -} diff --git a/tests/sqlparser_bigquery.rs b/tests/sqlparser_bigquery.rs index 2ba54d3e..2bf470f7 100644 --- a/tests/sqlparser_bigquery.rs +++ b/tests/sqlparser_bigquery.rs @@ -23,151 +23,124 @@ use std::ops::Deref; use sqlparser::ast::*; use sqlparser::dialect::{BigQueryDialect, GenericDialect}; use sqlparser::parser::{ParserError, ParserOptions}; -use sqlparser::tokenizer::{Location, Span}; use test_utils::*; #[test] fn parse_literal_string() { let sql = concat!( - "SELECT ", // line 1, column 1 - "'single', ", // line 1, column 7 - r#""double", "#, // line 1, column 14 - "'''triple-single''', ", // line 1, column 22 - r#""""triple-double""", "#, // line 1, column 33 - r#"'single\'escaped', "#, // line 1, column 43 - r#"'''triple-single\'escaped''', "#, // line 1, column 55 - r#"'''triple-single'unescaped''', "#, // line 1, column 68 - r#""double\"escaped", "#, // line 1, column 83 - r#""""triple-double\"escaped""", "#, // line 1, column 92 - r#""""triple-double"unescaped""", "#, // line 1, column 105 - r#""""triple-double'unescaped""", "#, // line 1, column 118 - r#"'''triple-single"unescaped'''"#, // line 1, column 131 + "SELECT ", + "'single', ", + r#""double", "#, + "'''triple-single''', ", + r#""""triple-double""", "#, + r#"'single\'escaped', "#, + r#"'''triple-single\'escaped''', "#, + r#"'''triple-single'unescaped''', "#, + r#""double\"escaped", "#, + r#""""triple-double\"escaped""", "#, + r#""""triple-double"unescaped""""#, ); let dialect = TestedDialects::new_with_options( vec![Box::new(BigQueryDialect {})], ParserOptions::new().with_unescape(false), ); let select = dialect.verified_only_select(sql); - assert_eq!(12, select.projection.len()); + assert_eq!(10, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedString("single".into()).with_empty_span()), + &Expr::Value(Value::SingleQuotedString("single".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedString("double".into()).with_empty_span()), + &Expr::Value(Value::DoubleQuotedString("double".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedString("triple-single".into()).with_empty_span()), + &Expr::Value(Value::TripleSingleQuotedString("triple-single".to_string())), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value(Value::TripleDoubleQuotedString("triple-double".into()).with_empty_span()), + &Expr::Value(Value::TripleDoubleQuotedString("triple-double".to_string())), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::SingleQuotedString(r#"single\'escaped"#.into()).with_empty_span()), + &Expr::Value(Value::SingleQuotedString(r#"single\'escaped"#.to_string())), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value( - 
Value::TripleSingleQuotedString(r#"triple-single\'escaped"#.into()).with_empty_span() - ), + &Expr::Value(Value::TripleSingleQuotedString( + r#"triple-single\'escaped"#.to_string() + )), expr_from_projection(&select.projection[5]) ); assert_eq!( - &Expr::Value( - Value::TripleSingleQuotedString(r#"triple-single'unescaped"#.into()).with_empty_span() - ), + &Expr::Value(Value::TripleSingleQuotedString( + r#"triple-single'unescaped"#.to_string() + )), expr_from_projection(&select.projection[6]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedString(r#"double\"escaped"#.to_string()).with_empty_span()), + &Expr::Value(Value::DoubleQuotedString(r#"double\"escaped"#.to_string())), expr_from_projection(&select.projection[7]) ); assert_eq!( - &Expr::Value( - Value::TripleDoubleQuotedString(r#"triple-double\"escaped"#.to_string()) - .with_empty_span() - ), + &Expr::Value(Value::TripleDoubleQuotedString( + r#"triple-double\"escaped"#.to_string() + )), expr_from_projection(&select.projection[8]) ); assert_eq!( - &Expr::Value( - Value::TripleDoubleQuotedString(r#"triple-double"unescaped"#.to_string()) - .with_empty_span() - ), + &Expr::Value(Value::TripleDoubleQuotedString( + r#"triple-double"unescaped"#.to_string() + )), expr_from_projection(&select.projection[9]) ); - assert_eq!( - &Expr::Value( - Value::TripleDoubleQuotedString(r#"triple-double'unescaped"#.to_string()) - .with_empty_span() - ), - expr_from_projection(&select.projection[10]) - ); - assert_eq!( - &Expr::Value( - Value::TripleSingleQuotedString(r#"triple-single"unescaped"#.to_string()) - .with_empty_span() - ), - expr_from_projection(&select.projection[11]) - ); } #[test] fn parse_byte_literal() { let sql = concat!( - "SELECT ", // line 1, column 1 - "B'abc', ", // line 1, column 8 - r#"B"abc", "#, // line 1, column 15 - r#"B'f\(abc,(.*),def\)', "#, // line 1, column 22 - r#"B"f\(abc,(.*),def\)", "#, // line 1, column 42 - r#"B'''abc''', "#, // line 1, column 62 - r#"B"""abc""""#, // line 1, column 74 + "SELECT ", + "B'abc', ", + r#"B"abc", "#, + r#"B'f\(abc,(.*),def\)', "#, + r#"B"f\(abc,(.*),def\)", "#, + r#"B'''abc''', "#, + r#"B"""abc""""#, ); let stmt = bigquery().verified_stmt(sql); if let Statement::Query(query) = stmt { if let SetExpr::Select(select) = *query.body { assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value( - Value::SingleQuotedByteStringLiteral("abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::SingleQuotedByteStringLiteral("abc".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value( - Value::DoubleQuotedByteStringLiteral("abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::DoubleQuotedByteStringLiteral("abc".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value( - Value::SingleQuotedByteStringLiteral(r"f\(abc,(.*),def\)".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::SingleQuotedByteStringLiteral( + r"f\(abc,(.*),def\)".to_string() + )), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value( - Value::DoubleQuotedByteStringLiteral(r"f\(abc,(.*),def\)".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::DoubleQuotedByteStringLiteral( + r"f\(abc,(.*),def\)".to_string() + )), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value( - Value::TripleSingleQuotedByteStringLiteral(r"abc".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::TripleSingleQuotedByteStringLiteral( + r"abc".to_string() + )), expr_from_projection(&select.projection[4]) ); 
assert_eq!( - &Expr::Value( - Value::TripleDoubleQuotedByteStringLiteral(r"abc".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::TripleDoubleQuotedByteStringLiteral( + r"abc".to_string() + )), expr_from_projection(&select.projection[5]) ); } @@ -184,54 +157,48 @@ fn parse_byte_literal() { #[test] fn parse_raw_literal() { let sql = concat!( - "SELECT ", // line 1, column 1 - "R'abc', ", // line 1, column 8 - r#"R"abc", "#, // line 1, column 15 - r#"R'f\(abc,(.*),def\)', "#, // line 1, column 22 - r#"R"f\(abc,(.*),def\)", "#, // line 1, column 42 - r#"R'''abc''', "#, // line 1, column 62 - r#"R"""abc""""#, // line 1, column 74 + "SELECT ", + "R'abc', ", + r#"R"abc", "#, + r#"R'f\(abc,(.*),def\)', "#, + r#"R"f\(abc,(.*),def\)", "#, + r#"R'''abc''', "#, + r#"R"""abc""""#, ); let stmt = bigquery().verified_stmt(sql); if let Statement::Query(query) = stmt { if let SetExpr::Select(select) = *query.body { assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value( - Value::SingleQuotedRawStringLiteral("abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::SingleQuotedRawStringLiteral("abc".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value( - Value::DoubleQuotedRawStringLiteral("abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::DoubleQuotedRawStringLiteral("abc".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value( - Value::SingleQuotedRawStringLiteral(r"f\(abc,(.*),def\)".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::SingleQuotedRawStringLiteral( + r"f\(abc,(.*),def\)".to_string() + )), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value( - Value::DoubleQuotedRawStringLiteral(r"f\(abc,(.*),def\)".to_string()) - .with_empty_span() - ), + &Expr::Value(Value::DoubleQuotedRawStringLiteral( + r"f\(abc,(.*),def\)".to_string() + )), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value( - Value::TripleSingleQuotedRawStringLiteral(r"abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::TripleSingleQuotedRawStringLiteral( + r"abc".to_string() + )), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value( - Value::TripleDoubleQuotedRawStringLiteral(r"abc".to_string()).with_empty_span() - ), + &Expr::Value(Value::TripleDoubleQuotedRawStringLiteral( + r"abc".to_string() + )), expr_from_projection(&select.projection[5]) ); } @@ -245,64 +212,6 @@ fn parse_raw_literal() { ); } -#[test] -fn parse_big_query_non_reserved_column_alias() { - let sql = r#"SELECT OFFSET, EXPLAIN, ANALYZE, SORT, TOP, VIEW FROM T"#; - bigquery().verified_stmt(sql); - - let sql = r#"SELECT 1 AS OFFSET, 2 AS EXPLAIN, 3 AS ANALYZE FROM T"#; - bigquery().verified_stmt(sql); -} - -#[test] -fn parse_at_at_identifier() { - bigquery().verified_stmt("SELECT @@error.stack_trace, @@error.message"); -} - -#[test] -fn parse_begin() { - let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; RAISE USING MESSAGE = FORMAT('ERR: %s', 'Bad'); END"#; - let Statement::StartTransaction { - statements, - exception, - has_end_keyword, - .. 
- } = bigquery().verified_stmt(sql) - else { - unreachable!(); - }; - assert_eq!(1, statements.len()); - assert!(exception.is_some()); - - let exception = exception.unwrap(); - assert_eq!(1, exception.len()); - assert!(has_end_keyword); - - bigquery().verified_stmt( - "BEGIN SELECT 1; SELECT 2; EXCEPTION WHEN ERROR THEN SELECT 2; SELECT 4; END", - ); - bigquery() - .verified_stmt("BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT @@error.stack_trace; END"); - bigquery().verified_stmt("BEGIN EXCEPTION WHEN ERROR THEN SELECT 2; END"); - bigquery().verified_stmt("BEGIN SELECT 1; SELECT 2; EXCEPTION WHEN ERROR THEN END"); - bigquery().verified_stmt("BEGIN EXCEPTION WHEN ERROR THEN END"); - bigquery().verified_stmt("BEGIN SELECT 1; SELECT 2; END"); - bigquery().verified_stmt("BEGIN END"); - - assert_eq!( - bigquery() - .parse_sql_statements("BEGIN SELECT 1; SELECT 2 END") - .unwrap_err(), - ParserError::ParserError("Expected: ;, found: END".to_string()) - ); - assert_eq!( - bigquery() - .parse_sql_statements("BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2 END") - .unwrap_err(), - ParserError::ParserError("Expected: ;, found: END".to_string()) - ); -} - #[test] fn parse_delete_statement() { let sql = "DELETE \"table\" WHERE 1"; @@ -312,7 +221,15 @@ fn parse_delete_statement() { .. }) => { assert_eq!( - table_from_name(ObjectName::from(vec![Ident::with_quote('"', "table")])), + TableFactor::Table { + name: ObjectName(vec![Ident::with_quote('"', "table")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].relation ); } @@ -339,7 +256,7 @@ fn parse_create_view_with_options() { } => { assert_eq!( name, - ObjectName::from(vec![ + ObjectName(vec![ "myproject".into(), "mydataset".into(), "newview".into() @@ -355,16 +272,10 @@ fn parse_create_view_with_options() { ViewColumnDef { name: Ident::new("age"), data_type: None, - options: Some(ColumnOptions::CommaSeparated(vec![ColumnOption::Options( - vec![SqlOption::KeyValue { - key: Ident::new("description"), - value: Expr::Value( - Value::DoubleQuotedString("field age".to_string()).with_span( - Span::new(Location::new(1, 42), Location::new(1, 52)) - ) - ), - }] - )])), + options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue { + key: Ident::new("description"), + value: Expr::Value(Value::DoubleQuotedString("field age".to_string())), + }])]), }, ], columns @@ -383,10 +294,9 @@ fn parse_create_view_with_options() { assert_eq!( &SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value( - Value::DoubleQuotedString("a view that expires in 2 days".to_string()) - .with_empty_span() - ), + value: Expr::Value(Value::DoubleQuotedString( + "a view that expires in 2 days".to_string() + )), }, &options[2], ); @@ -394,7 +304,6 @@ fn parse_create_view_with_options() { _ => unreachable!(), } } - #[test] fn parse_create_view_if_not_exists() { let sql = "CREATE VIEW IF NOT EXISTS mydataset.newview AS SELECT foo FROM bar"; @@ -454,7 +363,7 @@ fn parse_create_table_with_unquoted_hyphen() { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { assert_eq!( name, - ObjectName::from(vec![ + ObjectName(vec![ "my-pro-ject".into(), "mydataset".into(), "mytable".into() @@ -464,6 +373,7 @@ fn parse_create_table_with_unquoted_hyphen() { vec![ColumnDef { name: Ident::new("x"), data_type: DataType::Int64, + collation: None, options: vec![] },], columns @@ -489,18 +399,19 @@ fn parse_create_table_with_options() { columns, partition_by, cluster_by, - table_options, + options, .. }) => { assert_eq!( name, - ObjectName::from(vec!["mydataset".into(), "newtable".into()]) + ObjectName(vec!["mydataset".into(), "newtable".into()]) ); assert_eq!( vec![ ColumnDef { name: Ident::new("x"), data_type: DataType::Int64, + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -510,11 +421,9 @@ fn parse_create_table_with_options() { name: None, option: ColumnOption::Options(vec![SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value( - Value::DoubleQuotedString("field x".to_string()).with_span( - Span::new(Location::new(1, 42), Location::new(1, 52)) - ) - ), + value: Expr::Value(Value::DoubleQuotedString( + "field x".to_string() + )), },]) }, ] @@ -522,15 +431,14 @@ fn parse_create_table_with_options() { ColumnDef { name: Ident::new("y"), data_type: DataType::Bool, + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Options(vec![SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value( - Value::DoubleQuotedString("field y".to_string()).with_span( - Span::new(Location::new(1, 42), Location::new(1, 52)) - ) - ), + value: Expr::Value(Value::DoubleQuotedString( + "field y".to_string() + )), },]) }] }, @@ -541,32 +449,23 @@ fn parse_create_table_with_options() { ( Some(Box::new(Expr::Identifier(Ident::new("_PARTITIONDATE")))), Some(WrappedCollection::NoWrapping(vec![ - Expr::Identifier(Ident::new("userid")), - Expr::Identifier(Ident::new("age")), + Ident::new("userid"), + Ident::new("age"), ])), - CreateTableOptions::Options(vec![ + Some(vec![ SqlOption::KeyValue { key: Ident::new("partition_expiration_days"), - value: Expr::Value( - number("1").with_span(Span::new( - Location::new(1, 42), - Location::new(1, 43) - )) - ), + value: Expr::Value(number("1")), }, SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value( - Value::DoubleQuotedString("table option description".to_string()) - .with_span(Span::new( - Location::new(1, 42), - Location::new(1, 52) - )) - ), + value: Expr::Value(Value::DoubleQuotedString( + "table option description".to_string() + )), }, ]) ), - (partition_by, cluster_by, table_options) + (partition_by, cluster_by, options) ) } _ => unreachable!(), @@ -581,12 +480,6 @@ fn parse_create_table_with_options() { r#"description = "table option description")"# ); bigquery().verified_stmt(sql); - - let sql = "CREATE TABLE foo (x INT64) OPTIONS()"; - bigquery().verified_stmt(sql); - - let sql = "CREATE TABLE db.schema.test (x INT64 OPTIONS(description = 'An optional INTEGER field')) OPTIONS()"; - bigquery().verified_stmt(sql); } #[test] @@ -594,7 +487,7 @@ fn parse_nested_data_types() { let sql = "CREATE TABLE table (x STRUCT, b BYTES(42)>, y ARRAY>)"; match bigquery_and_generic().one_statement_parses_to(sql, sql) { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { - assert_eq!(name, ObjectName::from(vec!["table".into()])); + assert_eq!(name, ObjectName(vec!["table".into()])); assert_eq!( columns, vec![ @@ -606,17 +499,16 @@ fn parse_nested_data_types() { field_name: Some("a".into()), field_type: DataType::Array(ArrayElemTypeDef::AngleBracket( Box::new(DataType::Int64,) - )), - options: None, + )) }, StructField { field_name: Some("b".into()), - field_type: DataType::Bytes(Some(42)), - options: None, + field_type: DataType::Bytes(Some(42)) }, ], StructBracketKind::AngleBrackets ), + collation: None, options: vec![], }, ColumnDef { @@ -626,11 +518,11 @@ fn parse_nested_data_types() { vec![StructField { field_name: None, field_type: DataType::Int64, - options: None, }], StructBracketKind::AngleBrackets ), ))), + collation: None, options: vec![], }, ] @@ -640,27 +532,56 @@ fn parse_nested_data_types() { } } +#[test] +fn parse_invalid_brackets() { + let sql = "SELECT STRUCT>(NULL)"; + assert_eq!( + bigquery_and_generic() + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError("unmatched > in STRUCT literal".to_string()) + ); + + let sql = "SELECT STRUCT>>(NULL)"; + assert_eq!( + bigquery_and_generic() + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError("Expected: (, found: >".to_string()) + ); + + let sql = "CREATE TABLE table (x STRUCT>>)"; + assert_eq!( + bigquery_and_generic() + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError( + "Expected: ',' or ')' after column definition, found: >".to_string() + ) + ); +} + #[test] fn parse_tuple_struct_literal() { // tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax // syntax: (expr1, expr2 [, ... ]) - let sql = "SELECT (1, 2, 3), (1, 1.0, '123', true)"; // line 1, column 1 + let sql = "SELECT (1, 2, 3), (1, 1.0, '123', true)"; let select = bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Tuple(vec![ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ]), expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Tuple(vec![ - Expr::value(number("1")), - Expr::value(number("1.0")), - Expr::Value(Value::SingleQuotedString("123".into()).with_empty_span()), - Expr::Value(Value::Boolean(true).with_empty_span()) + Expr::Value(number("1")), + Expr::Value(number("1.0")), + Expr::Value(Value::SingleQuotedString("123".to_string())), + Expr::Value(Value::Boolean(true)) ]), expr_from_projection(&select.projection[1]) ); @@ -676,9 +597,9 @@ fn parse_typeless_struct_syntax() { assert_eq!( &Expr::Struct { values: vec![ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ], fields: Default::default() }, @@ -687,35 +608,30 @@ fn parse_typeless_struct_syntax() { assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedString("abc".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedString("abc".to_string())),], fields: Default::default() }, expr_from_projection(&select.projection[1]) ); - assert_eq!( &Expr::Struct { values: vec![ - Expr::value(number("1")), + Expr::Value(number("1")), Expr::CompoundIdentifier(vec![Ident::from("t"), Ident::from("str_col")]), ], fields: Default::default() }, expr_from_projection(&select.projection[2]) ); - assert_eq!( &Expr::Struct { values: vec![ 
Expr::Named { - expr: Expr::value(number("1")).into(), + expr: Expr::Value(number("1")).into(), name: Ident::from("a") }, Expr::Named { - expr: Expr::Value(Value::SingleQuotedString("abc".into()).with_empty_span()) - .into(), + expr: Expr::Value(Value::SingleQuotedString("abc".to_string())).into(), name: Ident::from("b") }, ], @@ -723,7 +639,6 @@ fn parse_typeless_struct_syntax() { }, expr_from_projection(&select.projection[3]) ); - assert_eq!( &Expr::Struct { values: vec![Expr::Named { @@ -746,11 +661,10 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5"))], + values: vec![Expr::Value(number("5")),], fields: vec![StructField { field_name: None, field_type: DataType::Int64, - options: None, }] }, expr_from_projection(&select.projection[0]) @@ -758,17 +672,15 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!( &Expr::Struct { values: vec![ - Expr::value(number("1")), + Expr::Value(number("1")), Expr::CompoundIdentifier(vec![ Ident { value: "t".into(), quote_style: None, - span: Span::empty(), }, Ident { value: "str_col".into(), quote_style: None, - span: Span::empty(), }, ]), ], @@ -777,19 +689,15 @@ fn parse_typed_struct_syntax_bigquery() { field_name: Some(Ident { value: "x".into(), quote_style: None, - span: Span::empty(), }), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }, StructField { field_name: Some(Ident { value: "y".into(), quote_style: None, - span: Span::empty(), }), - field_type: DataType::String(None), - options: None, + field_type: DataType::String(None) }, ] }, @@ -800,27 +708,23 @@ fn parse_typed_struct_syntax_bigquery() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - span: Span::empty(), - })], + }),], fields: vec![ StructField { field_name: Some("arr".into()), field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( DataType::Float64 - ))), - options: None, + ))) }, StructField { field_name: Some("str".into()), field_type: DataType::Struct( vec![StructField { field_name: None, - field_type: DataType::Bool, - options: None, + field_type: DataType::Bool }], StructBracketKind::AngleBrackets - ), - options: None, + ) }, ] }, @@ -835,23 +739,20 @@ fn parse_typed_struct_syntax_bigquery() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - span: Span::empty(), - })], + }),], fields: vec![ StructField { field_name: Some("x".into()), field_type: DataType::Struct( Default::default(), StructBracketKind::AngleBrackets - ), - options: None, + ) }, StructField { field_name: Some("y".into()), field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( DataType::Struct(Default::default(), StructBracketKind::AngleBrackets) - ))), - options: None, + ))) }, ] }, @@ -863,24 +764,22 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::Boolean(true).with_empty_span())], + values: vec![Expr::Value(Value::Boolean(true)),], fields: vec![StructField { field_name: None, - field_type: DataType::Bool, - options: None, + field_type: DataType::Bool }] }, expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedByteStringLiteral("abc".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedByteStringLiteral( + "abc".into() + )),], fields: vec![StructField { field_name: None, - field_type: 
DataType::Bytes(Some(42)), - options: None, + field_type: DataType::Bytes(Some(42)) }] }, expr_from_projection(&select.projection[1]) @@ -891,13 +790,12 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(4, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::DoubleQuotedString("2011-05-05".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::DoubleQuotedString( + "2011-05-05".to_string() + )),], fields: vec![StructField { field_name: None, - field_type: DataType::Date, - options: None, + field_type: DataType::Date }] }, expr_from_projection(&select.projection[0]) @@ -906,37 +804,31 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Datetime(None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()), - span: Span::empty(), - }, - }], + value: "1999-01-01 01:23:34.45".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Datetime(None), - options: None, + field_type: DataType::Datetime(None) }] }, expr_from_projection(&select.projection[1]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5.0"))], + values: vec![Expr::Value(number("5.0")),], fields: vec![StructField { field_name: None, - field_type: DataType::Float64, - options: None, + field_type: DataType::Float64 }] }, expr_from_projection(&select.projection[2]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("1"))], + values: vec![Expr::Value(number("1")),], fields: vec![StructField { field_name: None, - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }] }, expr_from_projection(&select.projection[3]) @@ -948,18 +840,15 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!( &Expr::Struct { values: vec![Expr::Interval(Interval { - value: Box::new(Expr::Value( - Value::SingleQuotedString("2".into()).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString("2".to_string()))), leading_field: Some(DateTimeField::Hour), leading_precision: None, last_field: None, fractional_seconds_precision: None - })], + }),], fields: vec![StructField { field_name: None, - field_type: DataType::Interval, - options: None, + field_type: DataType::Interval }] }, expr_from_projection(&select.projection[0]) @@ -968,17 +857,11 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::JSON, - value: ValueWithSpan { - value: Value::SingleQuotedString( - r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into() - ), - span: Span::empty(), - } - }], + value: r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::JSON, - options: None, + field_type: DataType::JSON }] }, expr_from_projection(&select.projection[1]) @@ -989,13 +872,10 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::DoubleQuotedString("foo".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::DoubleQuotedString("foo".to_string())),], fields: vec![StructField { field_name: None, - field_type: DataType::String(Some(42)), - options: None, + field_type: DataType::String(Some(42)) }] }, expr_from_projection(&select.projection[0]) @@ -1004,17 +884,11 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Timestamp(None, 
TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString( - "2008-12-25 15:30:00 America/Los_Angeles".into() - ), - span: Span::empty(), - }, - }], + value: "2008-12-25 15:30:00 America/Los_Angeles".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Timestamp(None, TimezoneInfo::None), - options: None, + field_type: DataType::Timestamp(None, TimezoneInfo::None) }] }, expr_from_projection(&select.projection[1]) @@ -1024,15 +898,11 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("15:30:00".into()), - span: Span::empty(), - } - }], + value: "15:30:00".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Time(None, TimezoneInfo::None), - options: None, + field_type: DataType::Time(None, TimezoneInfo::None) }] }, expr_from_projection(&select.projection[2]) @@ -1045,15 +915,11 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Numeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1".into()), - span: Span::empty(), - } - }], + value: "1".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Numeric(ExactNumberInfo::None), - options: None, + field_type: DataType::Numeric(ExactNumberInfo::None) }] }, expr_from_projection(&select.projection[0]) @@ -1062,15 +928,11 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1".into()), - span: Span::empty(), - } - }], + value: "1".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::BigNumeric(ExactNumberInfo::None), - options: None, + field_type: DataType::BigNumeric(ExactNumberInfo::None) }] }, expr_from_projection(&select.projection[1]) @@ -1082,17 +944,15 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("1")), Expr::value(number("2")),], + values: vec![Expr::Value(number("1")), Expr::Value(number("2")),], fields: vec![ StructField { field_name: Some("key".into()), field_type: DataType::Int64, - options: None, }, StructField { field_name: Some("value".into()), field_type: DataType::Int64, - options: None, }, ] }, @@ -1110,11 +970,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5"))], + values: vec![Expr::Value(number("5")),], fields: vec![StructField { field_name: None, field_type: DataType::Int64, - options: None, }] }, expr_from_projection(&select.projection[0]) @@ -1122,17 +981,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!( &Expr::Struct { values: vec![ - Expr::value(number("1")), + Expr::Value(number("1")), Expr::CompoundIdentifier(vec![ Ident { value: "t".into(), quote_style: None, - span: Span::empty(), }, Ident { value: "str_col".into(), quote_style: None, - span: Span::empty(), }, ]), ], @@ -1141,24 +998,47 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { field_name: Some(Ident { value: "x".into(), quote_style: None, - span: Span::empty(), }), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }, 
StructField { field_name: Some(Ident { value: "y".into(), quote_style: None, - span: Span::empty(), }), - field_type: DataType::String(None), - options: None, + field_type: DataType::String(None) }, ] }, expr_from_projection(&select.projection[1]) ); + assert_eq!( + &Expr::Struct { + values: vec![Expr::Identifier(Ident { + value: "nested_col".into(), + quote_style: None, + }),], + fields: vec![ + StructField { + field_name: Some("arr".into()), + field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( + DataType::Float64 + ))) + }, + StructField { + field_name: Some("str".into()), + field_type: DataType::Struct( + vec![StructField { + field_name: None, + field_type: DataType::Bool + }], + StructBracketKind::AngleBrackets + ) + }, + ] + }, + expr_from_projection(&select.projection[2]) + ); let sql = r#"SELECT STRUCT>(nested_col)"#; let select = bigquery_and_generic().verified_only_select(sql); @@ -1168,23 +1048,20 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - span: Span::empty(), - })], + }),], fields: vec![ StructField { field_name: Some("x".into()), field_type: DataType::Struct( Default::default(), StructBracketKind::AngleBrackets - ), - options: None, + ) }, StructField { field_name: Some("y".into()), field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( DataType::Struct(Default::default(), StructBracketKind::AngleBrackets) - ))), - options: None, + ))) }, ] }, @@ -1196,24 +1073,22 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::Boolean(true).with_empty_span())], + values: vec![Expr::Value(Value::Boolean(true)),], fields: vec![StructField { field_name: None, - field_type: DataType::Bool, - options: None, + field_type: DataType::Bool }] }, expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedByteStringLiteral("abc".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedByteStringLiteral( + "abc".into() + )),], fields: vec![StructField { field_name: None, - field_type: DataType::Bytes(Some(42)), - options: None, + field_type: DataType::Bytes(Some(42)) }] }, expr_from_projection(&select.projection[1]) @@ -1224,13 +1099,12 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(4, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedString("2011-05-05".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedString( + "2011-05-05".to_string() + )),], fields: vec![StructField { field_name: None, - field_type: DataType::Date, - options: None, + field_type: DataType::Date }] }, expr_from_projection(&select.projection[0]) @@ -1239,37 +1113,31 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Datetime(None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()), - span: Span::empty(), - } - }], + value: "1999-01-01 01:23:34.45".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Datetime(None), - options: None, + field_type: DataType::Datetime(None) }] }, expr_from_projection(&select.projection[1]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5.0"))], + values: vec![Expr::Value(number("5.0")),], fields: vec![StructField { 
field_name: None, - field_type: DataType::Float64, - options: None, + field_type: DataType::Float64 }] }, expr_from_projection(&select.projection[2]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("1"))], + values: vec![Expr::Value(number("1")),], fields: vec![StructField { field_name: None, - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }] }, expr_from_projection(&select.projection[3]) @@ -1281,18 +1149,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!( &Expr::Struct { values: vec![Expr::Interval(Interval { - value: Box::new(Expr::Value( - Value::SingleQuotedString("1".into()).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), leading_field: Some(DateTimeField::Month), leading_precision: None, last_field: None, fractional_seconds_precision: None - })], + }),], fields: vec![StructField { field_name: None, - field_type: DataType::Interval, - options: None, + field_type: DataType::Interval }] }, expr_from_projection(&select.projection[0]) @@ -1301,17 +1166,11 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::JSON, - value: ValueWithSpan { - value: Value::SingleQuotedString( - r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into() - ), - span: Span::empty(), - } - }], + value: r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::JSON, - options: None, + field_type: DataType::JSON }] }, expr_from_projection(&select.projection[1]) @@ -1322,13 +1181,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedString("foo".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedString("foo".to_string())),], fields: vec![StructField { field_name: None, - field_type: DataType::String(Some(42)), - options: None, + field_type: DataType::String(Some(42)) }] }, expr_from_projection(&select.projection[0]) @@ -1337,17 +1193,11 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString( - "2008-12-25 15:30:00 America/Los_Angeles".into() - ), - span: Span::empty(), - } - }], + value: "2008-12-25 15:30:00 America/Los_Angeles".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Timestamp(None, TimezoneInfo::None), - options: None, + field_type: DataType::Timestamp(None, TimezoneInfo::None) }] }, expr_from_projection(&select.projection[1]) @@ -1357,15 +1207,11 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("15:30:00".into()), - span: Span::empty(), - } - }], + value: "15:30:00".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Time(None, TimezoneInfo::None), - options: None, + field_type: DataType::Time(None, TimezoneInfo::None) }] }, expr_from_projection(&select.projection[2]) @@ -1378,15 +1224,11 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Numeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: 
Value::SingleQuotedString("1".into()), - span: Span::empty(), - } - }], + value: "1".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::Numeric(ExactNumberInfo::None), - options: None, + field_type: DataType::Numeric(ExactNumberInfo::None) }] }, expr_from_projection(&select.projection[0]) @@ -1395,15 +1237,11 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1".into()), - span: Span::empty(), - } - }], + value: "1".to_string() + },], fields: vec![StructField { field_name: None, - field_type: DataType::BigNumeric(ExactNumberInfo::None), - options: None, + field_type: DataType::BigNumeric(ExactNumberInfo::None) }] }, expr_from_projection(&select.projection[1]) @@ -1412,50 +1250,44 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { #[test] fn parse_typed_struct_with_field_name_bigquery() { - let sql = r#"SELECT STRUCT(5), STRUCT("foo")"#; // line 1, column 1 + let sql = r#"SELECT STRUCT(5), STRUCT("foo")"#; let select = bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5"))], + values: vec![Expr::Value(number("5")),], fields: vec![StructField { field_name: Some(Ident::from("x")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }] }, expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::DoubleQuotedString("foo".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::DoubleQuotedString("foo".to_string())),], fields: vec![StructField { field_name: Some(Ident::from("y")), - field_type: DataType::String(None), - options: None, + field_type: DataType::String(None) }] }, expr_from_projection(&select.projection[1]) ); - let sql = r#"SELECT STRUCT(5, 5)"#; // line 1, column 1 + let sql = r#"SELECT STRUCT(5, 5)"#; let select = bigquery().verified_only_select(sql); assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5")), Expr::value(number("5")),], + values: vec![Expr::Value(number("5")), Expr::Value(number("5")),], fields: vec![ StructField { field_name: Some(Ident::from("x")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }, StructField { field_name: Some(Ident::from("y")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 } ] }, @@ -1465,50 +1297,44 @@ fn parse_typed_struct_with_field_name_bigquery() { #[test] fn parse_typed_struct_with_field_name_bigquery_and_generic() { - let sql = r#"SELECT STRUCT(5), STRUCT('foo')"#; // line 1, column 1 + let sql = r#"SELECT STRUCT(5), STRUCT('foo')"#; let select = bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5"))], + values: vec![Expr::Value(number("5")),], fields: vec![StructField { field_name: Some(Ident::from("x")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }] }, expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value( - Value::SingleQuotedString("foo".into()).with_empty_span() - )], + values: vec![Expr::Value(Value::SingleQuotedString("foo".to_string())),], fields: vec![StructField { field_name: Some(Ident::from("y")), - field_type: DataType::String(None), - options: None, + 
field_type: DataType::String(None) }] }, expr_from_projection(&select.projection[1]) ); - let sql = r#"SELECT STRUCT(5, 5)"#; // line 1, column 1 + let sql = r#"SELECT STRUCT(5, 5)"#; let select = bigquery_and_generic().verified_only_select(sql); assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::value(number("5")), Expr::value(number("5")),], + values: vec![Expr::Value(number("5")), Expr::Value(number("5")),], fields: vec![ StructField { field_name: Some(Ident::from("x")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 }, StructField { field_name: Some(Ident::from("y")), - field_type: DataType::Int64, - options: None, + field_type: DataType::Int64 } ] }, @@ -1539,7 +1365,15 @@ fn parse_table_identifiers() { assert_eq!( select.from, vec![TableWithJoins { - relation: table_from_name(ObjectName::from(expected)), + relation: TableFactor::Table { + name: ObjectName(expected), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![] },] ); @@ -1673,20 +1507,6 @@ fn parse_hyphenated_table_identifiers() { "SELECT * FROM foo-bar AS f JOIN baz-qux AS b ON f.id = b.id", ); - assert_eq!( - bigquery() - .verified_only_select_with_canonical( - "select * from foo-123.bar", - "SELECT * FROM foo-123.bar" - ) - .from[0] - .relation, - table_from_name(ObjectName::from(vec![ - Ident::new("foo-123"), - Ident::new("bar") - ])), - ); - assert_eq!( bigquery() .verified_only_select_with_canonical( @@ -1703,29 +1523,29 @@ fn parse_hyphenated_table_identifiers() { ])) }) ); + + let error_sql = "select foo-bar.* from foo-bar"; + assert!(bigquery().parse_sql_statements(error_sql).is_err()); } #[test] fn parse_table_time_travel() { let version = "2023-08-18 23:08:18".to_string(); - let sql = format!("SELECT 1 FROM t1 FOR SYSTEM_TIME AS OF '{version}'"); // line 1, column 1 + let sql = format!("SELECT 1 FROM t1 FOR SYSTEM_TIME AS OF '{version}'"); let select = bigquery().verified_only_select(&sql); assert_eq!( select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t1")]), + name: ObjectName(vec![Ident::new("t1")]), alias: None, args: None, with_hints: vec![], version: Some(TableVersion::ForSystemTimeAsOf(Expr::Value( - Value::SingleQuotedString(version).with_empty_span() + Value::SingleQuotedString(version) ))), partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, joins: vec![] },] @@ -1756,7 +1576,7 @@ fn parse_join_constraint_unnest_alias() { with_ordinality: false, }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { + join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::Identifier("c1".into())), op: BinaryOperator::Eq, right: Box::new(Expr::Identifier("c2".into())), @@ -1788,22 +1608,21 @@ fn parse_merge() { columns: vec![Ident::new("product"), Ident::new("quantity")], kind: MergeInsertKind::Values(Values { explicit_row: false, - rows: vec![vec![Expr::value(number("1")), Expr::value(number("2"))]], + rows: vec![vec![Expr::Value(number("1")), Expr::Value(number("2"))]], }), }); let update_action = MergeAction::Update { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("a")])), - value: Expr::value(number("1")), + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("a")])), + value: Expr::Value(number("1")), }, Assignment { - 
target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("b")])), - value: Expr::value(number("2")), + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("b")])), + value: Expr::Value(number("2")), }, ], }; - match bigquery_and_generic().verified_stmt(sql) { Statement::Merge { into, @@ -1811,12 +1630,11 @@ fn parse_merge() { source, on, clauses, - .. } => { assert!(!into); assert_eq!( TableFactor::Table { - name: ObjectName::from(vec![Ident::new("inventory")]), + name: ObjectName(vec![Ident::new("inventory")]), alias: Some(TableAlias { name: Ident::new("T"), columns: vec![], @@ -1826,15 +1644,12 @@ fn parse_merge() { version: Default::default(), partitions: Default::default(), with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, table ); assert_eq!( TableFactor::Table { - name: ObjectName::from(vec![Ident::new("newArrivals")]), + name: ObjectName(vec![Ident::new("newArrivals")]), alias: Some(TableAlias { name: Ident::new("S"), columns: vec![], @@ -1844,23 +1659,20 @@ fn parse_merge() { version: Default::default(), partitions: Default::default(), with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, source ); - assert_eq!(Expr::Value(Value::Boolean(false).with_empty_span()), *on); + assert_eq!(Expr::Value(Value::Boolean(false)), *on); assert_eq!( vec![ MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: insert_action.clone(), }, MergeClause { clause_kind: MergeClauseKind::NotMatchedByTarget, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: insert_action.clone(), }, MergeClause { @@ -1870,7 +1682,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatchedBySource, - predicate: Some(Expr::value(number("2"))), + predicate: Some(Expr::Value(number("2"))), action: MergeAction::Delete }, MergeClause { @@ -1880,12 +1692,12 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatchedBySource, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: update_action.clone(), }, MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: MergeAction::Insert(MergeInsertExpr { columns: vec![Ident::new("product"), Ident::new("quantity"),], kind: MergeInsertKind::Row, @@ -1901,7 +1713,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: MergeAction::Insert(MergeInsertExpr { columns: vec![], kind: MergeInsertKind::Row @@ -1917,7 +1729,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::Matched, - predicate: Some(Expr::value(number("1"))), + predicate: Some(Expr::Value(number("1"))), action: MergeAction::Delete, }, MergeClause { @@ -1933,7 +1745,7 @@ fn parse_merge() { kind: MergeInsertKind::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::value(number("1")), + Expr::Value(number("1")), Expr::Identifier(Ident::new("DEFAULT")), ]] }) @@ -1947,7 +1759,7 @@ fn parse_merge() { kind: MergeInsertKind::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::value(number("1")), + Expr::Value(number("1")), Expr::Identifier(Ident::new("DEFAULT")), ]] }) @@ -2063,7 +1875,7 @@ fn parse_array_agg_func() { fn parse_big_query_declare() { for (sql, 
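
The MERGE coverage above goes through `verified_stmt`, which re-serializes the parsed statement and compares it with the input. A rough standalone sketch of the same parse is shown below; it assumes the dialect accepts this minimal MERGE form (modeled on the test's own `MERGE inventory T USING newArrivals S ON false ...` input) and matches only on the `clauses` field that appears in this hunk. The function name is a placeholder.

use sqlparser::ast::Statement;
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

// Illustrative: parse a minimal MERGE and count its WHEN clauses.
fn minimal_merge() {
    let sql = "MERGE inventory T USING newArrivals S ON false WHEN MATCHED THEN DELETE";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    match &stmts[0] {
        Statement::Merge { clauses, .. } => assert_eq!(clauses.len(), 1),
        other => panic!("expected a MERGE statement, got {other:?}"),
    }
}
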
expected_names, expected_data_type, expected_assigned_expr) in [ ( - "DECLARE x INT64", // line 1, column 1 + "DECLARE x INT64", vec![Ident::new("x")], Some(DataType::Int64), None, @@ -2072,25 +1884,25 @@ fn parse_big_query_declare() { "DECLARE x INT64 DEFAULT 42", vec![Ident::new("x")], Some(DataType::Int64), - Some(DeclareAssignment::Default(Box::new(Expr::Value( - number("42").with_empty_span(), - )))), + Some(DeclareAssignment::Default(Box::new(Expr::Value(number( + "42", + ))))), ), ( "DECLARE x, y, z INT64 DEFAULT 42", vec![Ident::new("x"), Ident::new("y"), Ident::new("z")], Some(DataType::Int64), - Some(DeclareAssignment::Default(Box::new(Expr::Value( - number("42").with_empty_span(), - )))), + Some(DeclareAssignment::Default(Box::new(Expr::Value(number( + "42", + ))))), ), ( "DECLARE x DEFAULT 42", vec![Ident::new("x")], None, - Some(DeclareAssignment::Default(Box::new(Expr::Value( - number("42").with_empty_span(), - )))), + Some(DeclareAssignment::Default(Box::new(Expr::Value(number( + "42", + ))))), ), ] { match bigquery().verified_stmt(sql) { @@ -2139,47 +1951,27 @@ fn parse_map_access_expr() { let sql = "users[-1][safe_offset(2)].a.b"; let expr = bigquery().verified_expr(sql); - let expected = Expr::CompoundFieldAccess { - root: Box::new(Expr::Identifier(Ident::with_span( - Span::new(Location::of(1, 1), Location::of(1, 6)), - "users", - ))), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Index { - index: Expr::UnaryOp { + fn map_access_key(key: Expr, syntax: MapAccessSyntax) -> MapAccessKey { + MapAccessKey { key, syntax } + } + let expected = Expr::MapAccess { + column: Expr::Identifier(Ident::new("users")).into(), + keys: vec![ + map_access_key( + Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Expr::value(number("1")).into(), + expr: Expr::Value(number("1")).into(), }, - }), - AccessExpr::Subscript(Subscript::Index { - index: Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_span( - Span::new(Location::of(1, 11), Location::of(1, 22)), - "safe_offset", - )]), - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - number("2").with_empty_span(), - )))], - clauses: vec![], - }), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - uses_odbc_syntax: false, - }), - }), - AccessExpr::Dot(Expr::Identifier(Ident::with_span( - Span::new(Location::of(1, 24), Location::of(1, 25)), - "a", - ))), - AccessExpr::Dot(Expr::Identifier(Ident::with_span( - Span::new(Location::of(1, 26), Location::of(1, 27)), - "b", - ))), + MapAccessSyntax::Bracket, + ), + map_access_key( + call("safe_offset", [Expr::Value(number("2"))]), + MapAccessSyntax::Bracket, + ), + map_access_key( + Expr::CompoundIdentifier(vec![Ident::new("a"), Ident::new("b")]), + MapAccessSyntax::Period, + ), ], }; assert_eq!(expr, expected); @@ -2201,24 +1993,23 @@ fn test_bigquery_create_function() { let stmt = bigquery().verified_stmt(sql); assert_eq!( stmt, - Statement::CreateFunction(CreateFunction { - or_alter: false, + Statement::CreateFunction { or_replace: true, temporary: true, if_not_exists: false, - name: ObjectName::from(vec![ + name: ObjectName(vec![ Ident::new("project1"), Ident::new("mydataset"), Ident::new("myfunction"), ]), args: Some(vec![OperateFunctionArg::with_name("x", DataType::Float64),]), return_type: Some(DataType::Float64), - function_body: Some(CreateFunctionBody::AsAfterOptions(Expr::Value( - 
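
For these DECLARE cases, `verified_stmt` asserts that parsing and re-serializing reproduces the input string unchanged. A minimal sketch of that same round-trip without the test harness, assuming only `Statement`'s `Display` implementation; the function name is illustrative.

use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

// Illustrative: the round-trip property that `verified_stmt` checks.
fn declare_round_trip() {
    let sql = "DECLARE x INT64 DEFAULT 42";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    // Re-serializing the parsed statement reproduces the canonical SQL text.
    assert_eq!(stmts[0].to_string(), sql);
}
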
number("42").with_empty_span() - ))), + function_body: Some(CreateFunctionBody::AsAfterOptions(Expr::Value(number( + "42" + )))), options: Some(vec![SqlOption::KeyValue { key: Ident::new("x"), - value: Expr::Value(Value::SingleQuotedString("y".into()).with_empty_span()), + value: Expr::Value(Value::SingleQuotedString("y".into())), }]), behavior: None, using: None, @@ -2227,7 +2018,7 @@ fn test_bigquery_create_function() { remote_connection: None, called_on_null: None, parallel: None, - }) + } ); let sqls = [ @@ -2337,14 +2128,10 @@ fn test_bigquery_trim() { let select = bigquery().verified_only_select(sql_only_select); assert_eq!( &Expr::Trim { - expr: Box::new(Expr::Value( - Value::SingleQuotedString("xyz".to_owned()).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString("xyz".to_owned()))), trim_where: None, trim_what: None, - trim_characters: Some(vec![Expr::Value( - Value::SingleQuotedString("a".to_owned()).with_empty_span() - )]), + trim_characters: Some(vec![Expr::Value(Value::SingleQuotedString("a".to_owned()))]), }, expr_from_projection(only(&select.projection)) ); @@ -2371,77 +2158,22 @@ fn parse_extract_weekday() { ); } -#[test] -fn bigquery_select_expr_star() { - bigquery() - .verified_only_select("SELECT STRUCT((SELECT foo FROM T WHERE true)).* FROM T"); - bigquery().verified_only_select("SELECT [STRUCT('foo')][0].* EXCEPT (foo) FROM T"); - bigquery().verified_only_select("SELECT myfunc()[0].* FROM T"); -} - #[test] fn test_select_as_struct() { - for (sql, parse_to) in [ - ( - "SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))", - "SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))", - ), - ( - "SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))", - "SELECT * FROM (SELECT DISTINCT AS STRUCT STRUCT(123 AS a, false AS b))", - ), - ( - "SELECT * FROM (SELECT ALL AS STRUCT STRUCT(123 AS a, false AS b))", - "SELECT * FROM (SELECT AS STRUCT STRUCT(123 AS a, false AS b))", - ), - ] { - bigquery().one_statement_parses_to(sql, parse_to); - } - + bigquery().verified_only_select("SELECT * FROM (SELECT AS VALUE STRUCT(123 AS a, false AS b))"); let select = bigquery().verified_only_select("SELECT AS STRUCT 1 AS a, 2 AS b"); assert_eq!(Some(ValueTableMode::AsStruct), select.value_table_mode); } #[test] fn test_select_as_value() { - for (sql, parse_to) in [ - ( - "SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - "SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - ), - ( - "SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - "SELECT * FROM (SELECT DISTINCT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - ), - ( - "SELECT * FROM (SELECT ALL AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - "SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", - ), - ] { - bigquery().one_statement_parses_to(sql, parse_to); - } - + bigquery().verified_only_select( + "SELECT * FROM (SELECT AS VALUE STRUCT(5 AS star_rating, false AS up_down_rating))", + ); let select = bigquery().verified_only_select("SELECT AS VALUE STRUCT(1 AS a, 2 AS b) AS xyz"); assert_eq!(Some(ValueTableMode::AsValue), select.value_table_mode); } -#[test] -fn test_triple_quote_typed_strings() { - bigquery().verified_expr(r#"JSON '''{"foo":"bar's"}'''"#); - - let expr = bigquery().verified_expr(r#"JSON """{"foo":"bar's"}""""#); - assert_eq!( - Expr::TypedString { - data_type: DataType::JSON, - value: 
ValueWithSpan { - value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()), - span: Span::empty(), - } - }, - expr - ); -} - #[test] fn test_array_agg() { bigquery_and_generic().verified_expr("ARRAY_AGG(state)"); @@ -2462,107 +2194,3 @@ fn test_any_value() { bigquery_and_generic().verified_expr("ANY_VALUE(fruit HAVING MAX sold)"); bigquery_and_generic().verified_expr("ANY_VALUE(fruit HAVING MIN sold)"); } - -#[test] -fn test_any_type() { - bigquery().verified_stmt(concat!( - "CREATE OR REPLACE TEMPORARY FUNCTION ", - "my_function(param1 ANY TYPE) ", - "AS (", - "(SELECT 1)", - ")", - )); -} - -#[test] -fn test_any_type_dont_break_custom_type() { - bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)"); -} - -#[test] -fn test_struct_field_options() { - bigquery().verified_stmt(concat!( - "CREATE TABLE my_table (", - "f0 STRUCT, ", - "f1 STRUCT<", - "a STRING OPTIONS(description = 'This is a string', type = 'string'), ", - "b INT64", - "> OPTIONS(description = 'This is a struct field')", - ")", - )); -} - -#[test] -fn test_struct_trailing_and_nested_bracket() { - bigquery().verified_stmt(concat!( - "CREATE TABLE my_table (", - "f0 STRING, ", - "f1 STRUCT>, ", - "f2 STRING", - ")", - )); - - // More complex nested structs - bigquery().verified_stmt(concat!( - "CREATE TABLE my_table (", - "f0 STRING, ", - "f1 STRUCT>>, ", - "f2 STRUCT>>>, ", - "f3 STRUCT>", - ")", - )); - - // Bad case with missing closing bracket - assert_eq!( - ParserError::ParserError("Expected: >, found: )".to_owned()), - bigquery() - .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT after parsing data type STRUCT)".to_owned() - ), - bigquery() - .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT>)") - .unwrap_err() - ); - - // Base case with redundant closing bracket in nested struct - assert_eq!( - ParserError::ParserError( - "Expected: ',' or ')' after column definition, found: >".to_owned() - ), - bigquery() - .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT>>, c INT64)") - .unwrap_err() - ); - - let sql = "SELECT STRUCT>(NULL)"; - assert_eq!( - bigquery_and_generic() - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError("unmatched > in STRUCT literal".to_string()) - ); - - let sql = "SELECT STRUCT>>(NULL)"; - assert_eq!( - bigquery_and_generic() - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError("Expected: (, found: >".to_string()) - ); - - let sql = "CREATE TABLE table (x STRUCT>>)"; - assert_eq!( - bigquery_and_generic() - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError( - "Expected: ',' or ')' after column definition, found: >".to_string() - ) - ); -} diff --git a/tests/sqlparser_clickhouse.rs b/tests/sqlparser_clickhouse.rs index 9e5b6ce8..a7187111 100644 --- a/tests/sqlparser_clickhouse.rs +++ b/tests/sqlparser_clickhouse.rs @@ -21,14 +21,12 @@ #[macro_use] mod test_utils; -use helpers::attached_token::AttachedToken; -use sqlparser::tokenizer::Span; use test_utils::*; -use sqlparser::ast::Expr::{BinaryOp, Identifier}; +use sqlparser::ast::Expr::{BinaryOp, Identifier, MapAccess}; use sqlparser::ast::SelectItem::UnnamedExpr; use sqlparser::ast::TableFactor::Table; -use sqlparser::ast::Value::Boolean; +use sqlparser::ast::Value::Number; use sqlparser::ast::*; use sqlparser::dialect::ClickHouseDialect; use sqlparser::dialect::GenericDialect; @@ -41,29 +39,35 @@ fn parse_map_access_expr() { assert_eq!( Select { distinct: None, - select_token: AttachedToken::empty(), top: None, top_before_distinct: false, - projection: 
vec![UnnamedExpr(Expr::CompoundFieldAccess { - root: Box::new(Identifier(Ident { + projection: vec![UnnamedExpr(MapAccess { + column: Box::new(Identifier(Ident { value: "string_values".to_string(), quote_style: None, - span: Span::empty(), })), - access_chain: vec![AccessExpr::Subscript(Subscript::Index { - index: call( + keys: vec![MapAccessKey { + key: call( "indexOf", [ Expr::Identifier(Ident::new("string_names")), - Expr::value(Value::SingleQuotedString("endpoint".to_string())) + Expr::Value(Value::SingleQuotedString("endpoint".to_string())) ] ), - })], + syntax: MapAccessSyntax::Bracket + }], })], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("foos")])), + relation: Table { + name: ObjectName(vec![Ident::new("foos")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -72,24 +76,25 @@ fn parse_map_access_expr() { left: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::value(Value::SingleQuotedString("test".to_string()))), + right: Box::new(Expr::Value(Value::SingleQuotedString("test".to_string()))), }), op: BinaryOperator::And, right: Box::new(BinaryOp { - left: Box::new(Expr::CompoundFieldAccess { - root: Box::new(Identifier(Ident::new("string_value"))), - access_chain: vec![AccessExpr::Subscript(Subscript::Index { - index: call( + left: Box::new(MapAccess { + column: Box::new(Identifier(Ident::new("string_value"))), + keys: vec![MapAccessKey { + key: call( "indexOf", [ Expr::Identifier(Ident::new("string_name")), - Expr::value(Value::SingleQuotedString("app".to_string())) + Expr::Value(Value::SingleQuotedString("app".to_string())) ] ), - })], + syntax: MapAccessSyntax::Bracket + }], }), op: BinaryOperator::NotEq, - right: Box::new(Expr::value(Value::SingleQuotedString("foo".to_string()))), + right: Box::new(Expr::Value(Value::SingleQuotedString("foo".to_string()))), }), }), group_by: GroupByExpr::Expressions(vec![], vec![]), @@ -102,7 +107,6 @@ fn parse_map_access_expr() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }, select ); @@ -115,8 +119,8 @@ fn parse_array_expr() { assert_eq!( &Expr::Array(Array { elem: vec![ - Expr::value(Value::SingleQuotedString("1".to_string())), - Expr::value(Value::SingleQuotedString("2".to_string())), + Expr::Value(Value::SingleQuotedString("1".to_string())), + Expr::Value(Value::SingleQuotedString("2".to_string())), ], named: false, }), @@ -166,12 +170,10 @@ fn parse_delimited_identifiers() { args, with_hints, version, - .. 
+ with_ordinality: _, + partitions: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -190,8 +192,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -220,21 +221,11 @@ fn parse_delimited_identifiers() { #[test] fn parse_create_table() { - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY ("x")"#); - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x""#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY ("x")"#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x""#); clickhouse().verified_stmt( - r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, + r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, ); - clickhouse().one_statement_parses_to( - "CREATE TABLE x (a int) ENGINE = MergeTree() ORDER BY a", - "CREATE TABLE x (a INT) ENGINE = MergeTree ORDER BY a", - ); -} - -#[test] -fn parse_insert_into_function() { - clickhouse().verified_stmt(r#"INSERT INTO TABLE FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"#); - clickhouse().verified_stmt(r#"INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"#); } #[test] @@ -311,7 +302,7 @@ fn parse_alter_table_add_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName::from(vec!["t0".into()])); + assert_eq!(name, ObjectName(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -328,14 +319,12 @@ fn parse_alter_table_add_projection() { vec![] )), order_by: Some(OrderBy { - kind: OrderByKind::Expressions(vec![OrderByExpr { + exprs: vec![OrderByExpr { expr: Identifier(Ident::new("b")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, - }]), + }], interpolate: None, }), } @@ -383,7 +372,7 @@ fn parse_alter_table_drop_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName::from(vec!["t0".into()])); + assert_eq!(name, ObjectName(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -416,7 +405,7 @@ fn parse_alter_table_clear_and_materialize_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName::from(vec!["t0".into()])); + assert_eq!(name, ObjectName(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -535,6 +524,7 @@ fn column_def(name: Ident, data_type: DataType) -> ColumnDef { ColumnDef { name, data_type, + collation: None, options: vec![], } } @@ -559,7 +549,7 @@ fn parse_clickhouse_data_types() { match clickhouse_and_generic().one_statement_parses_to(sql, &canonical_sql) { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { - assert_eq!(name, ObjectName::from(vec!["table".into()])); + assert_eq!(name, ObjectName(vec!["table".into()])); assert_eq!( columns, vec![ @@ -594,13 +584,13 @@ fn parse_clickhouse_data_types() { #[test] fn parse_create_table_with_nullable() { - let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE = MergeTree ORDER BY (`k`)"#; + let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE=MergeTree ORDER BY (`k`)"#; // ClickHouse has a case-sensitive definition of data type, but canonical representation is not let canonical_sql = sql.replace("String", "STRING"); match clickhouse_and_generic().one_statement_parses_to(sql, &canonical_sql) { Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!(name, ObjectName::from(vec!["table".into()])); + assert_eq!(name, ObjectName(vec!["table".into()])); assert_eq!( columns, vec![ @@ -623,6 +613,7 @@ fn parse_create_table_with_nullable() { ColumnDef { name: "d".into(), data_type: DataType::Date32, + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null @@ -648,7 +639,7 @@ fn parse_create_table_with_nested_data_types() { match clickhouse().one_statement_parses_to(sql, "") { Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!(name, ObjectName::from(vec!["table".into()])); + assert_eq!(name, ObjectName(vec!["table".into()])); assert_eq!( columns, vec![ @@ -666,6 +657,7 @@ fn parse_create_table_with_nested_data_types() { DataType::LowCardinality(Box::new(DataType::String(None))) ) ]), + collation: None, options: vec![], }, ColumnDef { @@ -674,16 +666,15 @@ fn parse_create_table_with_nested_data_types() { DataType::Tuple(vec![ StructField { field_name: None, - field_type: DataType::FixedString(128), - options: None, + field_type: DataType::FixedString(128) }, StructField { field_name: None, - field_type: DataType::Int128, - options: None, + field_type: DataType::Int128 } ]) ))), + collation: None, options: vec![], }, ColumnDef { @@ -692,16 +683,15 @@ fn parse_create_table_with_nested_data_types() { StructField { field_name: Some("a".into()), field_type: DataType::Datetime64(9, None), - options: None, }, StructField { field_name: Some("b".into()), field_type: DataType::Array(ArrayElemTypeDef::Parenthesis( Box::new(DataType::Uuid) - )), - options: None, + )) }, ]), + collation: None, options: vec![], }, ColumnDef { @@ -710,6 +700,7 @@ fn parse_create_table_with_nested_data_types() { Box::new(DataType::String(None)), Box::new(DataType::UInt16) ), + collation: None, options: vec![], }, ] @@ -723,14 +714,14 @@ fn parse_create_table_with_nested_data_types() { fn parse_create_table_with_primary_key() { match clickhouse_and_generic().verified_stmt(concat!( r#"CREATE TABLE db.table (`i` INT, `k` INT)"#, - " ENGINE = SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", + " ENGINE=SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", " PRIMARY KEY tuple(i)", " ORDER BY tuple(i)", )) { Statement::CreateTable(CreateTable { name, columns, - table_options, + engine, primary_key, order_by, .. 
@@ -741,35 +732,30 @@ fn parse_create_table_with_primary_key() { ColumnDef { name: Ident::with_quote('`', "i"), data_type: DataType::Int(None), + collation: None, options: vec![], }, ColumnDef { name: Ident::with_quote('`', "k"), data_type: DataType::Int(None), + collation: None, options: vec![], }, ], columns ); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("ENGINE"), - name: Some(Ident::new("SharedMergeTree")), - values: vec![ + assert_eq!( + engine, + Some(TableEngine { + name: "SharedMergeTree".to_string(), + parameters: Some(vec![ Ident::with_quote('\'', "/clickhouse/tables/{uuid}/{shard}"), Ident::with_quote('\'', "{replica}"), - ] - } - ))); - + ]), + }) + ); fn assert_function(actual: &Function, name: &str, arg: &str) -> bool { - assert_eq!(actual.name, ObjectName::from(vec![Ident::new(name)])); + assert_eq!(actual.name, ObjectName(vec![Ident::new(name)])); assert_eq!( actual.args, FunctionArguments::List(FunctionArgumentList { @@ -814,7 +800,7 @@ fn parse_create_table_with_variant_default_expressions() { " b DATETIME EPHEMERAL now(),", " c DATETIME EPHEMERAL,", " d STRING ALIAS toString(c)", - ") ENGINE = MergeTree" + ") ENGINE=MergeTree" ); match clickhouse_and_generic().verified_stmt(sql) { Statement::CreateTable(CreateTable { columns, .. }) => { @@ -824,11 +810,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("a"), data_type: DataType::Datetime(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Materialized(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("now")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("now")]), args: FunctionArguments::List(FunctionArgumentList { args: vec![], duplicate_treatment: None, @@ -845,11 +831,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("b"), data_type: DataType::Datetime(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Ephemeral(Some(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("now")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("now")]), args: FunctionArguments::List(FunctionArgumentList { args: vec![], duplicate_treatment: None, @@ -866,6 +852,7 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("c"), data_type: DataType::Datetime(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Ephemeral(None) @@ -874,11 +861,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("d"), data_type: DataType::String(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Alias(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("toString")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("toString")]), args: FunctionArguments::List(FunctionArgumentList { args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( Identifier(Ident::new("c")) @@ -905,33 +892,31 @@ fn parse_create_table_with_variant_default_expressions() { fn parse_create_view_with_fields_data_types() { match clickhouse().verified_stmt(r#"CREATE VIEW v (i "int", f "String") AS SELECT * FROM t"#) { Statement::CreateView { name, columns, .. 
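
The two sides of the ClickHouse CREATE TABLE hunks disagree on the canonical spelling of the engine clause (`ENGINE = MergeTree` versus `ENGINE=MergeTree`), so a sketch that avoids committing to either spacing can instead check that parse → print → re-parse is a fixed point on the AST. This assumes `ClickHouseDialect` and the `Display`/`PartialEq` impls on `Statement`; the function name is illustrative.

use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

// Illustrative: re-parsing the printed statement should yield the same AST,
// regardless of which ENGINE spelling the printer canonicalizes to.
fn clickhouse_create_table_round_trip() {
    let sql = r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY ("x")"#;
    let dialect = ClickHouseDialect {};
    let first = Parser::parse_sql(&dialect, sql).unwrap();
    let reprinted = first[0].to_string();
    let second = Parser::parse_sql(&dialect, &reprinted).unwrap();
    assert_eq!(first, second);
}
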
} => { - assert_eq!(name, ObjectName::from(vec!["v".into()])); + assert_eq!(name, ObjectName(vec!["v".into()])); assert_eq!( columns, vec![ ViewColumnDef { name: "i".into(), data_type: Some(DataType::Custom( - ObjectName::from(vec![Ident { + ObjectName(vec![Ident { value: "int".into(), - quote_style: Some('"'), - span: Span::empty(), + quote_style: Some('"') }]), vec![] )), - options: None, + options: None }, ViewColumnDef { name: "f".into(), data_type: Some(DataType::Custom( - ObjectName::from(vec![Ident { + ObjectName(vec![Ident { value: "String".into(), - quote_style: Some('"'), - span: Span::empty(), + quote_style: Some('"') }]), vec![] )), - options: None, + options: None }, ] ); @@ -960,113 +945,42 @@ fn parse_limit_by() { clickhouse_and_generic().verified_stmt( r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC LIMIT 1 BY asset, toStartOfDay(created_at)"#, ); - clickhouse_and_generic().parse_sql_statements( - r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC BY asset, toStartOfDay(created_at)"#, - ).expect_err("BY without LIMIT"); - clickhouse_and_generic() - .parse_sql_statements("SELECT * FROM T OFFSET 5 BY foo") - .expect_err("BY with OFFSET but without LIMIT"); } #[test] fn parse_settings_in_query() { - fn check_settings(sql: &str, expected: Vec) { - match clickhouse_and_generic().verified_stmt(sql) { - Statement::Query(q) => { - assert_eq!(q.settings, Some(expected)); - } - _ => unreachable!(), + match clickhouse_and_generic() + .verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#) + { + Statement::Query(query) => { + assert_eq!( + query.settings, + Some(vec![ + Setting { + key: Ident::new("max_threads"), + value: Number("1".parse().unwrap(), false) + }, + Setting { + key: Ident::new("max_block_size"), + value: Number("10000".parse().unwrap(), false) + }, + ]) + ); } - } - - for (sql, expected_settings) in [ - ( - r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#, - vec![ - Setting { - key: Ident::new("max_threads"), - value: Expr::value(number("1")), - }, - Setting { - key: Ident::new("max_block_size"), - value: Expr::value(number("10000")), - }, - ], - ), - ( - r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#, - vec![Setting { - key: Ident::new("additional_table_filters"), - value: Expr::Dictionary(vec![DictionaryField { - key: Ident::with_quote('\'', "table_1"), - value: Expr::value(single_quoted_string("x != 2")).into(), - }]), - }], - ), - ( - r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#, - vec![ - Setting { - key: Ident::new("additional_result_filter"), - value: Expr::value(single_quoted_string("x != 2")), - }, - Setting { - key: Ident::new("query_plan_optimize_lazy_materialization"), - value: Expr::value(Boolean(false)), - }, - ], - ), - ] { - check_settings(sql, expected_settings); + _ => unreachable!(), } let invalid_cases = vec![ - ("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"), - ( - "SELECT * FROM t SETTINGS a=", - "Expected: an expression, found: EOF", - ), - ("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"), - ( - "SELECT * FROM t SETTINGS a=1, b=", - "Expected: an expression, found: EOF", - ), - ( - "SELECT * FROM t SETTINGS a = {", - "Expected: identifier, found: EOF", - ), - ( - "SELECT * FROM t SETTINGS a = {'b'", - "Expected: :, found: EOF", - ), - ( - "SELECT * FROM t SETTINGS a = {'b': ", - "Expected: an expression, found: EOF", - ), - ( - "SELECT 
* FROM t SETTINGS a = {'b': 'c',}", - "Expected: identifier, found: }", - ), - ( - "SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}", - "Expected: :, found: }", - ), - ( - "SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }", - "Expected: an expression, found: }", - ), - ( - "SELECT * FROM t SETTINGS a = {ANY(b)}", - "Expected: :, found: (", - ), + "SELECT * FROM t SETTINGS a", + "SELECT * FROM t SETTINGS a=", + "SELECT * FROM t SETTINGS a=1, b", + "SELECT * FROM t SETTINGS a=1, b=", + "SELECT * FROM t SETTINGS a=1, b=c", ]; - for (sql, error_msg) in invalid_cases { - assert_eq!( - clickhouse_and_generic() - .parse_sql_statements(sql) - .unwrap_err(), - ParserError(error_msg.to_string()) - ); + for sql in invalid_cases { + clickhouse_and_generic() + .parse_sql_statements(sql) + .expect_err("Expected: SETTINGS key = value, found: "); } } #[test] @@ -1103,15 +1017,17 @@ fn parse_select_parametric_function() { assert_eq!(parameters.args.len(), 2); assert_eq!( parameters.args[0], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::Number("0.5".parse().unwrap(), false)).with_empty_span() - ))) + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(Value::Number( + "0.5".parse().unwrap(), + false + )))) ); assert_eq!( parameters.args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::Number("0.6".parse().unwrap(), false)).with_empty_span() - ))) + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(Value::Number( + "0.6".parse().unwrap(), + false + )))) ); } _ => unreachable!(), @@ -1144,6 +1060,61 @@ fn parse_create_materialized_view() { clickhouse_and_generic().verified_stmt(sql); } +#[test] +fn parse_group_by_with_modifier() { + let clauses = ["x", "a, b", "ALL"]; + let modifiers = [ + "WITH ROLLUP", + "WITH CUBE", + "WITH TOTALS", + "WITH ROLLUP WITH CUBE", + ]; + let expected_modifiers = [ + vec![GroupByWithModifier::Rollup], + vec![GroupByWithModifier::Cube], + vec![GroupByWithModifier::Totals], + vec![GroupByWithModifier::Rollup, GroupByWithModifier::Cube], + ]; + for clause in &clauses { + for (modifier, expected_modifier) in modifiers.iter().zip(expected_modifiers.iter()) { + let sql = format!("SELECT * FROM t GROUP BY {clause} {modifier}"); + match clickhouse_and_generic().verified_stmt(&sql) { + Statement::Query(query) => { + let group_by = &query.body.as_select().unwrap().group_by; + if clause == &"ALL" { + assert_eq!(group_by, &GroupByExpr::All(expected_modifier.to_vec())); + } else { + assert_eq!( + group_by, + &GroupByExpr::Expressions( + clause + .split(", ") + .map(|c| Identifier(Ident::new(c))) + .collect(), + expected_modifier.to_vec() + ) + ); + } + } + _ => unreachable!(), + } + } + } + + // invalid cases + let invalid_cases = [ + "SELECT * FROM t GROUP BY x WITH", + "SELECT * FROM t GROUP BY x WITH ROLLUP CUBE", + "SELECT * FROM t GROUP BY x WITH WITH ROLLUP", + "SELECT * FROM t GROUP BY WITH ROLLUP", + ]; + for sql in invalid_cases { + clickhouse_and_generic() + .parse_sql_statements(sql) + .expect_err("Expected: one of ROLLUP or CUBE or TOTALS, found: WITH"); + } +} + #[test] fn parse_select_order_by_with_fill_interpolate() { let sql = "SELECT id, fname, lname FROM customer WHERE id < 5 \ @@ -1155,53 +1126,42 @@ fn parse_select_order_by_with_fill_interpolate() { let select = clickhouse().verified_query(sql); assert_eq!( OrderBy { - kind: OrderByKind::Expressions(vec![ + exprs: vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - options: OrderByOptions { - asc: Some(true), - nulls_first: Some(true), - }, + asc: Some(true), 
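
The GROUP BY modifier test above pins the exact `GroupByWithModifier` values. Because the arity of `GroupByExpr::Expressions` differs between the two sides of this diff, a looser standalone sketch only checks that the `WITH TOTALS` form is accepted and lands in that variant at all; the function name is illustrative.

use sqlparser::ast::{GroupByExpr, SetExpr, Statement};
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

// Illustrative: WITH TOTALS is parsed as part of the GROUP BY clause.
fn group_by_with_totals_parses() {
    let sql = "SELECT x FROM t GROUP BY x WITH TOTALS";
    let stmts = Parser::parse_sql(&ClickHouseDialect {}, sql).unwrap();
    let Statement::Query(query) = &stmts[0] else { panic!("expected a query") };
    let SetExpr::Select(select) = query.body.as_ref() else { panic!("expected a SELECT") };
    assert!(matches!(&select.group_by, GroupByExpr::Expressions(..)));
}
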
+ nulls_first: Some(true), with_fill: Some(WithFill { - from: Some(Expr::value(number("10"))), - to: Some(Expr::value(number("20"))), - step: Some(Expr::value(number("2"))), + from: Some(Expr::Value(number("10"))), + to: Some(Expr::Value(number("20"))), + step: Some(Expr::Value(number("2"))), }), }, OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - options: OrderByOptions { - asc: Some(false), - nulls_first: Some(false), - }, + asc: Some(false), + nulls_first: Some(false), with_fill: Some(WithFill { - from: Some(Expr::value(number("30"))), - to: Some(Expr::value(number("40"))), - step: Some(Expr::value(number("3"))), + from: Some(Expr::Value(number("30"))), + to: Some(Expr::Value(number("40"))), + step: Some(Expr::Value(number("3"))), }), }, - ]), + ], interpolate: Some(Interpolate { exprs: Some(vec![InterpolateExpr { column: Ident::new("col1"), expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), }]) }) }, select.order_by.expect("ORDER BY expected") ); - assert_eq!( - select.limit_clause, - Some(LimitClause::LimitOffset { - limit: Some(Expr::value(number("2"))), - offset: None, - limit_by: vec![] - }) - ); + assert_eq!(Some(Expr::Value(number("2"))), select.limit); } #[test] @@ -1242,15 +1202,11 @@ fn parse_with_fill() { let select = clickhouse().verified_query(sql); assert_eq!( Some(WithFill { - from: Some(Expr::value(number("10"))), - to: Some(Expr::value(number("20"))), - step: Some(Expr::value(number("2"))), - }) - .as_ref(), - match select.order_by.expect("ORDER BY expected").kind { - OrderByKind::Expressions(ref exprs) => exprs[0].with_fill.as_ref(), - _ => None, - } + from: Some(Expr::Value(number("10"))), + to: Some(Expr::Value(number("20"))), + step: Some(Expr::Value(number("2"))), + }), + select.order_by.expect("ORDER BY expected").exprs[0].with_fill ); } @@ -1285,7 +1241,7 @@ fn parse_interpolate_body_with_columns() { expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), }, InterpolateExpr { @@ -1297,17 +1253,12 @@ fn parse_interpolate_body_with_columns() { expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col4"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("4"))), + right: Box::new(Expr::Value(number("4"))), }), }, ]) - }) - .as_ref(), - select - .order_by - .expect("ORDER BY expected") - .interpolate - .as_ref() + }), + select.order_by.expect("ORDER BY expected").interpolate ); } @@ -1316,12 +1267,8 @@ fn parse_interpolate_without_body() { let sql = "SELECT fname FROM customer ORDER BY fname WITH FILL INTERPOLATE"; let select = clickhouse().verified_query(sql); assert_eq!( - Some(Interpolate { exprs: None }).as_ref(), - select - .order_by - .expect("ORDER BY expected") - .interpolate - .as_ref() + Some(Interpolate { exprs: None }), + select.order_by.expect("ORDER BY expected").interpolate ); } @@ -1332,13 +1279,8 @@ fn parse_interpolate_with_empty_body() { assert_eq!( Some(Interpolate { exprs: Some(vec![]) - }) - .as_ref(), - select - .order_by - .expect("ORDER BY expected") - .interpolate - .as_ref() + }), + select.order_by.expect("ORDER BY expected").interpolate ); } @@ -1352,9 +1294,7 @@ fn test_prewhere() { Some(&BinaryOp { left: Box::new(Identifier(Ident::new("x"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - 
(Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), }) ); let selection = query.as_ref().body.as_select().unwrap().selection.as_ref(); @@ -1363,9 +1303,7 @@ fn test_prewhere() { Some(&BinaryOp { left: Box::new(Identifier(Ident::new("y"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("2".parse().unwrap(), false)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Number("2".parse().unwrap(), false))), }) ); } @@ -1381,17 +1319,13 @@ fn test_prewhere() { left: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("x"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), }), op: BinaryOperator::And, right: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("y"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("2".parse().unwrap(), false)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Number("2".parse().unwrap(), false))), }), }) ); @@ -1415,16 +1349,16 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - clickhouse().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + clickhouse().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for "e in "e_styles { // Test single identifier with different type of quotes assert_eq!( - clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -1437,7 +1371,7 @@ fn parse_use() { fn test_query_with_format_clause() { let format_options = vec!["TabSeparated", "JSONCompact", "NULL"]; for format in &format_options { - let sql = format!("SELECT * FROM t FORMAT {format}"); + let sql = format!("SELECT * FROM t FORMAT {}", format); match clickhouse_and_generic().verified_stmt(&sql) { Statement::Query(query) => { if *format == "NULL" { @@ -1465,26 +1399,6 @@ fn test_query_with_format_clause() { } } -#[test] -fn test_insert_query_with_format_clause() { - let cases = [ - r#"INSERT INTO tbl FORMAT JSONEachRow {"id": 1, "value": "foo"}, {"id": 2, "value": "bar"}"#, - r#"INSERT INTO tbl FORMAT JSONEachRow ["first", "second", "third"]"#, - r#"INSERT INTO tbl FORMAT JSONEachRow [{"first": 1}]"#, - r#"INSERT INTO tbl (foo) FORMAT JSONAsObject {"foo": {"bar": {"x": "y"}, "baz": 1}}"#, - r#"INSERT INTO tbl (foo, bar) FORMAT JSON {"foo": 1, "bar": 2}"#, - r#"INSERT INTO tbl FORMAT CSV col1, col2, col3"#, - r#"INSERT INTO tbl FORMAT LineAsString "I love apple", "I love banana", "I love orange""#, - r#"INSERT INTO tbl (foo) SETTINGS input_format_json_read_bools_as_numbers = true FORMAT JSONEachRow {"id": 1, "value": "foo"}"#, - r#"INSERT INTO tbl SETTINGS format_template_resultset = '/some/path/resultset.format', format_template_row = '/some/path/row.format' FORMAT Template"#, - r#"INSERT INTO tbl SETTINGS input_format_json_read_bools_as_numbers = true FORMAT JSONEachRow {"id": 1, "value": "foo"}"#, - ]; - - for sql in &cases { - clickhouse().verified_stmt(sql); - } -} - #[test] fn 
parse_create_table_on_commit_and_as_query() { let sql = r#"CREATE LOCAL TEMPORARY TABLE test ON COMMIT PRESERVE ROWS AS SELECT 1"#; @@ -1499,9 +1413,10 @@ fn parse_create_table_on_commit_and_as_query() { assert_eq!(on_commit, Some(OnCommit::PreserveRows)); assert_eq!( query.unwrap().body.as_select().unwrap().projection, - vec![UnnamedExpr(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - ))] + vec![UnnamedExpr(Expr::Value(Value::Number( + "1".parse().unwrap(), + false + )))] ); } _ => unreachable!(), @@ -1514,9 +1429,9 @@ fn parse_freeze_and_unfreeze_partition() { for operation_name in &["FREEZE", "UNFREEZE"] { let sql = format!("ALTER TABLE t {operation_name} PARTITION '2024-08-14'"); - let expected_partition = Partition::Expr(Expr::Value( - Value::SingleQuotedString("2024-08-14".to_string()).with_empty_span(), - )); + let expected_partition = Partition::Expr(Expr::Value(Value::SingleQuotedString( + "2024-08-14".to_string(), + ))); match clickhouse_and_generic().verified_stmt(&sql) { Statement::AlterTable { operations, .. } => { assert_eq!(operations.len(), 1); @@ -1544,9 +1459,9 @@ fn parse_freeze_and_unfreeze_partition() { match clickhouse_and_generic().verified_stmt(&sql) { Statement::AlterTable { operations, .. } => { assert_eq!(operations.len(), 1); - let expected_partition = Partition::Expr(Expr::Value( - Value::SingleQuotedString("2024-08-14".to_string()).with_empty_span(), - )); + let expected_partition = Partition::Expr(Expr::Value(Value::SingleQuotedString( + "2024-08-14".to_string(), + ))); let expected_operation = if operation_name == &"FREEZE" { AlterTableOperation::FreezePartition { partition: expected_partition, @@ -1620,11 +1535,11 @@ fn parse_select_table_function_settings() { settings: Some(vec![ Setting { key: "s0".into(), - value: Expr::value(number("3")), + value: Value::Number("3".parse().unwrap(), false), }, Setting { key: "s1".into(), - value: Expr::value(single_quoted_string("s")), + value: Value::SingleQuotedString("s".into()), }, ]), }, @@ -1645,11 +1560,11 @@ fn parse_select_table_function_settings() { settings: Some(vec![ Setting { key: "s0".into(), - value: Expr::value(number("3")), + value: Value::Number("3".parse().unwrap(), false), }, Setting { key: "s1".into(), - value: Expr::value(single_quoted_string("s")), + value: Value::SingleQuotedString("s".into()), }, ]), }, @@ -1659,6 +1574,7 @@ fn parse_select_table_function_settings() { "SELECT * FROM t(SETTINGS a=)", "SELECT * FROM t(SETTINGS a=1, b)", "SELECT * FROM t(SETTINGS a=1, b=)", + "SELECT * FROM t(SETTINGS a=1, b=c)", ]; for sql in invalid_cases { clickhouse_and_generic() @@ -1697,14 +1613,6 @@ fn parse_explain_table() { } } -#[test] -fn parse_table_sample() { - clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 0.1"); - clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1000"); - clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1 / 10"); - clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1 / 10 OFFSET 1 / 2"); -} - fn clickhouse() -> TestedDialects { TestedDialects::new(vec![Box::new(ClickHouseDialect {})]) } diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs index 15144479..49753a1f 100644 --- a/tests/sqlparser_common.rs +++ b/tests/sqlparser_common.rs @@ -25,7 +25,6 @@ extern crate core; -use helpers::attached_token::AttachedToken; use matches::assert_matches; use sqlparser::ast::SelectItem::UnnamedExpr; use sqlparser::ast::TableFactor::{Pivot, Unpivot}; @@ -35,14 +34,12 @@ use sqlparser::dialect::{ GenericDialect, HiveDialect, 
MsSqlDialect, MySqlDialect, PostgreSqlDialect, RedshiftSqlDialect, SQLiteDialect, SnowflakeDialect, }; -use sqlparser::keywords::{Keyword, ALL_KEYWORDS}; +use sqlparser::keywords::ALL_KEYWORDS; use sqlparser::parser::{Parser, ParserError, ParserOptions}; use sqlparser::tokenizer::Tokenizer; -use sqlparser::tokenizer::{Location, Span}; use test_utils::{ - all_dialects, all_dialects_where, all_dialects_with_options, alter_table_op, assert_eq_vec, - call, expr_from_projection, join, number, only, table, table_alias, table_from_name, - TestedDialects, + all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, expr_from_projection, + join, number, only, table, table_alias, TestedDialects, }; #[macro_use] @@ -51,54 +48,15 @@ mod test_utils; #[cfg(test)] use pretty_assertions::assert_eq; use sqlparser::ast::ColumnOption::Comment; -use sqlparser::ast::DateTimeField::Seconds; use sqlparser::ast::Expr::{Identifier, UnaryOp}; -use sqlparser::ast::Value::Number; use sqlparser::test_utils::all_dialects_except; -#[test] -fn parse_numeric_literal_underscore() { - let dialects = all_dialects_where(|d| d.supports_numeric_literal_underscores()); - - let canonical = if cfg!(feature = "bigdecimal") { - "SELECT 10000" - } else { - "SELECT 10_000" - }; - - let select = dialects.verified_only_select_with_canonical("SELECT 10_000", canonical); - - assert_eq!( - select.projection, - vec![UnnamedExpr(Expr::Value( - (number("10_000")).with_empty_span() - ))] - ); -} - -#[test] -fn parse_function_object_name() { - let select = verified_only_select("SELECT a.b.c.d(1, 2, 3) FROM T"); - let Expr::Function(func) = expr_from_projection(&select.projection[0]) else { - unreachable!() - }; - assert_eq!( - ObjectName::from( - ["a", "b", "c", "d"] - .into_iter() - .map(Ident::new) - .collect::>() - ), - func.name, - ); -} - #[test] fn parse_insert_values() { let row = vec![ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ]; let rows1 = vec![row.clone()]; let rows2 = vec![row.clone(), row]; @@ -134,7 +92,7 @@ fn parse_insert_values() { ) { match verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source: Some(source), .. @@ -158,12 +116,6 @@ fn parse_insert_values() { verified_stmt("INSERT INTO customer WITH foo AS (SELECT 1) SELECT * FROM foo UNION VALUES (1)"); } -#[test] -fn parse_insert_set() { - let dialects = all_dialects_where(|d| d.supports_insert_set()); - dialects.verified_stmt("INSERT INTO tbl1 SET col1 = 1, col2 = 'abc', col3 = current_date()"); -} - #[test] fn parse_replace_into() { let dialect = PostgreSqlDialect {}; @@ -187,7 +139,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table: table_name, + table_name, .. }) => { assert_eq!(columns, vec![]); @@ -196,10 +148,7 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert_eq!(returning, None); assert_eq!(source, None); - assert_eq!( - table_name, - TableObject::TableName(ObjectName::from(vec!["test_table".into()])) - ); + assert_eq!(table_name, ObjectName(vec!["test_table".into()])); } _ => unreachable!(), } @@ -215,7 +164,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table: table_name, + table_name, .. 
}) => { assert_eq!(after_columns, vec![]); @@ -224,10 +173,7 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert!(returning.is_some()); assert_eq!(source, None); - assert_eq!( - table_name, - TableObject::TableName(ObjectName::from(vec!["test_table".into()])) - ); + assert_eq!(table_name, ObjectName(vec!["test_table".into()])); } _ => unreachable!(), } @@ -243,7 +189,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table: table_name, + table_name, .. }) => { assert_eq!(after_columns, vec![]); @@ -252,10 +198,7 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert_eq!(returning, None); assert_eq!(source, None); - assert_eq!( - table_name, - TableObject::TableName(ObjectName::from(vec!["test_table".into()])) - ); + assert_eq!(table_name, ObjectName(vec!["test_table".into()])); } _ => unreachable!(), } @@ -291,13 +234,8 @@ fn parse_insert_default_values() { #[test] fn parse_insert_select_returning() { - // Dialects that support `RETURNING` as a column identifier do - // not support this syntax. - let dialects = - all_dialects_where(|d| !d.is_column_alias(&Keyword::RETURNING, &mut Parser::new(d))); - - dialects.verified_stmt("INSERT INTO t SELECT 1 RETURNING 2"); - let stmt = dialects.verified_stmt("INSERT INTO t SELECT x RETURNING x AS y"); + verified_stmt("INSERT INTO t SELECT 1 RETURNING 2"); + let stmt = verified_stmt("INSERT INTO t SELECT x RETURNING x AS y"); match stmt { Statement::Insert(Insert { returning: Some(ret), @@ -308,27 +246,6 @@ fn parse_insert_select_returning() { } } -#[test] -fn parse_insert_select_from_returning() { - let sql = "INSERT INTO table1 SELECT * FROM table2 RETURNING id"; - match verified_stmt(sql) { - Statement::Insert(Insert { - table: TableObject::TableName(table_name), - source: Some(source), - returning: Some(returning), - .. 
- }) => { - assert_eq!("table1", table_name.to_string()); - assert!(matches!(*source.body, SetExpr::Select(_))); - assert_eq!( - returning, - vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("id"))),] - ); - } - bad_stmt => unreachable!("Expected valid insert, got {:?}", bad_stmt), - } -} - #[test] fn parse_returning_as_column_alias() { verified_stmt("SELECT 1 AS RETURNING"); @@ -386,16 +303,16 @@ fn parse_update() { assignments, vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec!["a".into()])), - value: Expr::value(number("1")), + target: AssignmentTarget::ColumnName(ObjectName(vec!["a".into()])), + value: Expr::Value(number("1")), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec!["b".into()])), - value: Expr::value(number("2")), + target: AssignmentTarget::ColumnName(ObjectName(vec!["b".into()])), + value: Expr::Value(number("2")), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec!["c".into()])), - value: Expr::value(number("3")), + target: AssignmentTarget::ColumnName(ObjectName(vec!["c".into()])), + value: Expr::Value(number("3")), }, ] ); @@ -439,20 +356,27 @@ fn parse_update_set_from() { stmt, Statement::Update { table: TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("t1")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("t1")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }, assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("name")])), + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("name")])), value: Expr::CompoundIdentifier(vec![Ident::new("t2"), Ident::new("name")]) }], - from: Some(UpdateTableFromKind::AfterSet(vec![TableWithJoins { + from: Some(TableWithJoins { relation: TableFactor::Derived { lateral: false, subquery: Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, @@ -460,10 +384,17 @@ fn parse_update_set_from() { SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("name"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("id"))), ], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("t1")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("t1")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -482,24 +413,24 @@ fn parse_update_set_from() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }), alias: Some(TableAlias { name: Ident::new("t2"), columns: vec![], }) }, - joins: vec![] - }])), + joins: vec![], + }), selection: Some(Expr::BinaryOp { left: Box::new(Expr::CompoundIdentifier(vec![ Ident::new("t1"), @@ -512,12 +443,8 @@ fn parse_update_set_from() { ])), }), returning: None, - or: None, } ); - - let sql = "UPDATE T SET a = b FROM U, (SELECT foo FROM V) AS W WHERE 1 = 1"; - dialects.verified_stmt(sql); } #[test] @@ -530,12 +457,11 @@ fn parse_update_with_table_alias() { from: _from, 
selection, returning, - or: None, } => { assert_eq!( TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("users")]), + name: ObjectName(vec![Ident::new("users")]), alias: Some(TableAlias { name: Ident::new("u"), columns: vec![], @@ -545,9 +471,6 @@ fn parse_update_with_table_alias() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, joins: vec![], }, @@ -555,13 +478,11 @@ fn parse_update_with_table_alias() { ); assert_eq!( vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + target: AssignmentTarget::ColumnName(ObjectName(vec![ Ident::new("u"), Ident::new("username") ])), - value: Expr::Value( - (Value::SingleQuotedString("new_user".to_string())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("new_user".to_string())), }], assignments ); @@ -572,9 +493,9 @@ fn parse_update_with_table_alias() { Ident::new("username"), ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("old_user".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "old_user".to_string() + ))), }), selection ); @@ -584,25 +505,6 @@ fn parse_update_with_table_alias() { } } -#[test] -fn parse_update_or() { - let expect_or_clause = |sql: &str, expected_action: SqliteOnConflict| match verified_stmt(sql) { - Statement::Update { or, .. } => assert_eq!(or, Some(expected_action)), - other => unreachable!("Expected update with or, got {:?}", other), - }; - expect_or_clause( - "UPDATE OR REPLACE t SET n = n + 1", - SqliteOnConflict::Replace, - ); - expect_or_clause( - "UPDATE OR ROLLBACK t SET n = n + 1", - SqliteOnConflict::Rollback, - ); - expect_or_clause("UPDATE OR ABORT t SET n = n + 1", SqliteOnConflict::Abort); - expect_or_clause("UPDATE OR FAIL t SET n = n + 1", SqliteOnConflict::Fail); - expect_or_clause("UPDATE OR IGNORE t SET n = n + 1", SqliteOnConflict::Ignore); -} - #[test] fn parse_select_with_table_alias_as() { // AS is optional @@ -627,35 +529,22 @@ fn parse_select_with_table_alias() { select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("lineitem")]), + name: ObjectName(vec![Ident::new("lineitem")]), alias: Some(TableAlias { name: Ident::new("l"), - columns: vec![ - TableAliasColumnDef::from_name("A"), - TableAliasColumnDef::from_name("B"), - TableAliasColumnDef::from_name("C"), - ], + columns: vec![Ident::new("A"), Ident::new("B"), Ident::new("C"),], }), args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, joins: vec![], }] ); } -#[test] -fn parse_analyze() { - verified_stmt("ANALYZE TABLE test_table"); - verified_stmt("ANALYZE test_table"); -} - #[test] fn parse_invalid_table_name() { let ast = all_dialects().run_parser_method("db.public..customer", |parser| { @@ -679,7 +568,15 @@ fn parse_delete_statement() { .. }) => { assert_eq!( - table_from_name(ObjectName::from(vec![Ident::with_quote('"', "table")])), + TableFactor::Table { + name: ObjectName(vec![Ident::with_quote('"', "table")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].relation ); } @@ -710,25 +607,35 @@ fn parse_delete_statement_for_multi_tables() { .. 
}) => { assert_eq!( - ObjectName::from(vec![Ident::new("schema1"), Ident::new("table1")]), + ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), tables[0] ); assert_eq!( - ObjectName::from(vec![Ident::new("schema2"), Ident::new("table2")]), + ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), tables[1] ); assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema1"), - Ident::new("table1") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].relation ); assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema2"), - Ident::new("table2") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].joins[0].relation ); } @@ -746,31 +653,51 @@ fn parse_delete_statement_for_multi_tables_with_using() { .. }) => { assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema1"), - Ident::new("table1") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].relation ); assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema2"), - Ident::new("table2") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[1].relation ); assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema1"), - Ident::new("table1") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, using[0].relation ); assert_eq!( - table_from_name(ObjectName::from(vec![ - Ident::new("schema2"), - Ident::new("table2") - ])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, using[0].joins[0].relation ); } @@ -793,7 +720,15 @@ fn parse_where_delete_statement() { .. 
}) => { assert_eq!( - table_from_name(ObjectName::from(vec![Ident::new("foo")])), + TableFactor::Table { + name: ObjectName(vec![Ident::new("foo")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, from[0].relation, ); @@ -802,7 +737,7 @@ fn parse_where_delete_statement() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: Eq, - right: Box::new(Expr::value(number("5"))), + right: Box::new(Expr::Value(number("5"))), }, selection.unwrap(), ); @@ -828,7 +763,7 @@ fn parse_where_delete_with_alias_statement() { }) => { assert_eq!( TableFactor::Table { - name: ObjectName::from(vec![Ident::new("basket")]), + name: ObjectName(vec![Ident::new("basket")]), alias: Some(TableAlias { name: Ident::new("a"), columns: vec![], @@ -838,16 +773,13 @@ fn parse_where_delete_with_alias_statement() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, from[0].relation, ); assert_eq!( Some(vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("basket")]), + name: ObjectName(vec![Ident::new("basket")]), alias: Some(TableAlias { name: Ident::new("b"), columns: vec![], @@ -857,9 +789,6 @@ fn parse_where_delete_with_alias_statement() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, joins: vec![], }]), @@ -891,7 +820,7 @@ fn parse_top_level() { verified_stmt("(SELECT 1)"); verified_stmt("((SELECT 1))"); verified_stmt("VALUES (1)"); - verified_stmt("VALUES ROW(1, NULL, 'a'), ROW(2, NULL, 'b')"); + verified_stmt("VALUES ROW(1, true, 'a'), ROW(2, false, 'b')"); } #[test] @@ -901,12 +830,7 @@ fn parse_simple_select() { assert!(select.distinct.is_none()); assert_eq!(3, select.projection.len()); let select = verified_query(sql); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::value(number("5"))), - offset: None, - limit_by: vec![], - }; - assert_eq!(Some(expected_limit_clause), select.limit_clause); + assert_eq!(Some(Expr::Value(number("5"))), select.limit); } #[test] @@ -914,31 +838,14 @@ fn parse_limit() { verified_stmt("SELECT * FROM user LIMIT 1"); } -#[test] -fn parse_invalid_limit_by() { - all_dialects() - .parse_sql_statements("SELECT * FROM user BY name") - .expect_err("BY without LIMIT"); -} - #[test] fn parse_limit_is_not_an_alias() { // In dialects supporting LIMIT it shouldn't be parsed as a table alias let ast = verified_query("SELECT id FROM customer LIMIT 1"); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::value(number("1"))), - offset: None, - limit_by: vec![], - }; - assert_eq!(Some(expected_limit_clause), ast.limit_clause); + assert_eq!(Some(Expr::Value(number("1"))), ast.limit); let ast = verified_query("SELECT 1 LIMIT 5"); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::value(number("5"))), - offset: None, - limit_by: vec![], - }; - assert_eq!(Some(expected_limit_clause), ast.limit_clause); + assert_eq!(Some(Expr::Value(number("5"))), ast.limit); } #[test] @@ -980,44 +887,6 @@ fn parse_select_distinct_tuple() { ); } -#[test] -fn parse_outer_join_operator() { - let dialects = all_dialects_where(|d| d.supports_outer_join_operator()); - - let select = dialects.verified_only_select("SELECT 1 FROM T WHERE a = b (+)"); - assert_eq!( - select.selection, - Some(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("a"))), - op: 
BinaryOperator::Eq, - right: Box::new(Expr::OuterJoin(Box::new(Expr::Identifier(Ident::new("b"))))) - }) - ); - - let select = dialects.verified_only_select("SELECT 1 FROM T WHERE t1.c1 = t2.c2.d3 (+)"); - assert_eq!( - select.selection, - Some(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("t1"), - Ident::new("c1") - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::OuterJoin(Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("t2"), - Ident::new("c2"), - Ident::new("d3"), - ])))) - }) - ); - - let res = dialects.parse_sql_statements("SELECT 1 FROM T WHERE 1 = 2 (+)"); - assert_eq!( - ParserError::ParserError("Expected: column identifier before (+), found: 2".to_string()), - res.unwrap_err() - ); -} - #[test] fn parse_select_distinct_on() { let sql = "SELECT DISTINCT ON (album_id) name FROM track ORDER BY album_id, milliseconds"; @@ -1075,7 +944,7 @@ fn parse_select_into() { temporary: false, unlogged: false, table: false, - name: ObjectName::from(vec![Ident::new("table0")]), + name: ObjectName(vec![Ident::new("table0")]), }, only(&select.into) ); @@ -1108,7 +977,7 @@ fn parse_select_wildcard() { let select = verified_only_select(sql); assert_eq!( &SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("foo")])), + ObjectName(vec![Ident::new("foo")]), WildcardAdditionalOptions::default() ), only(&select.projection) @@ -1118,10 +987,7 @@ fn parse_select_wildcard() { let select = verified_only_select(sql); assert_eq!( &SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![ - Ident::new("myschema"), - Ident::new("mytable"), - ])), + ObjectName(vec![Ident::new("myschema"), Ident::new("mytable"),]), WildcardAdditionalOptions::default(), ), only(&select.projection) @@ -1156,7 +1022,7 @@ fn parse_column_aliases() { } = only(&select.projection) { assert_eq!(&BinaryOperator::Plus, op); - assert_eq!(&Expr::value(number("1")), right.as_ref()); + assert_eq!(&Expr::Value(number("1")), right.as_ref()); assert_eq!(&Ident::new("newname"), alias); } else { panic!("Expected: ExprWithAlias") @@ -1166,88 +1032,6 @@ fn parse_column_aliases() { one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", sql); } -#[test] -fn parse_select_expr_star() { - let dialects = all_dialects_where(|d| d.supports_select_expr_star()); - - // Identifier wildcard expansion. - let select = dialects.verified_only_select("SELECT foo.bar.* FROM T"); - let SelectItem::QualifiedWildcard(SelectItemQualifiedWildcardKind::ObjectName(object_name), _) = - only(&select.projection) - else { - unreachable!( - "expected wildcard select item: got {:?}", - &select.projection[0] - ) - }; - assert_eq!( - &ObjectName::from( - ["foo", "bar"] - .into_iter() - .map(Ident::new) - .collect::>() - ), - object_name - ); - - // Arbitrary compound expression with wildcard expansion. 
- let select = dialects.verified_only_select("SELECT foo - bar.* FROM T"); - let SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::Expr(Expr::BinaryOp { left, op, right }), - _, - ) = only(&select.projection) - else { - unreachable!( - "expected wildcard select item: got {:?}", - &select.projection[0] - ) - }; - let (Expr::Identifier(left), BinaryOperator::Minus, Expr::Identifier(right)) = - (left.as_ref(), op, right.as_ref()) - else { - unreachable!("expected binary op expr: got {:?}", &select.projection[0]) - }; - assert_eq!(&Ident::new("foo"), left); - assert_eq!(&Ident::new("bar"), right); - - // Arbitrary expression wildcard expansion. - let select = dialects.verified_only_select("SELECT myfunc().foo.* FROM T"); - let SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::Expr(Expr::CompoundFieldAccess { root, access_chain }), - _, - ) = only(&select.projection) - else { - unreachable!("expected wildcard expr: got {:?}", &select.projection[0]) - }; - assert!(matches!(root.as_ref(), Expr::Function(_))); - assert_eq!(1, access_chain.len()); - assert!(matches!( - &access_chain[0], - AccessExpr::Dot(Expr::Identifier(_)) - )); - - dialects.one_statement_parses_to( - "SELECT 2. * 3 FROM T", - #[cfg(feature = "bigdecimal")] - "SELECT 2 * 3 FROM T", - #[cfg(not(feature = "bigdecimal"))] - "SELECT 2. * 3 FROM T", - ); - dialects.verified_only_select("SELECT myfunc().* FROM T"); - - // Invalid - let res = dialects.parse_sql_statements("SELECT foo.*.* FROM T"); - assert_eq!( - ParserError::ParserError("Expected: end of statement, found: .".to_string()), - res.unwrap_err() - ); - - let dialects = all_dialects_where(|d| { - d.supports_select_expr_star() && d.supports_select_wildcard_except() - }); - dialects.verified_only_select("SELECT myfunc().* EXCEPT (foo) FROM T"); -} - #[test] fn test_eof_after_as() { let res = parse_sql_statements("SELECT foo AS"); @@ -1280,8 +1064,7 @@ fn parse_select_count_wildcard() { let select = verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("COUNT")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("COUNT")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -1303,8 +1086,7 @@ fn parse_select_count_distinct() { let select = verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("COUNT")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("COUNT")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: Some(DuplicateTreatment::Distinct), @@ -1387,7 +1169,7 @@ fn parse_null_in_select() { let sql = "SELECT NULL"; let select = verified_only_select(sql); assert_eq!( - &Expr::Value((Value::Null).with_empty_span()), + &Expr::Value(Value::Null), expr_from_projection(only(&select.projection)), ); } @@ -1423,18 +1205,18 @@ fn parse_exponent_in_select() -> Result<(), ParserError> { assert_eq!( &vec![ - SelectItem::UnnamedExpr(Expr::Value((number("10e-20")).with_empty_span())), - SelectItem::UnnamedExpr(Expr::value(number("1e3"))), - SelectItem::UnnamedExpr(Expr::Value((number("1e+3")).with_empty_span())), + SelectItem::UnnamedExpr(Expr::Value(number("10e-20"))), + SelectItem::UnnamedExpr(Expr::Value(number("1e3"))), + SelectItem::UnnamedExpr(Expr::Value(number("1e+3"))), SelectItem::ExprWithAlias { - expr: Expr::value(number("1e3")), + expr: Expr::Value(number("1e3")), alias: 
Ident::new("a") }, SelectItem::ExprWithAlias { - expr: Expr::value(number("1")), + expr: Expr::Value(number("1")), alias: Ident::new("e") }, - SelectItem::UnnamedExpr(Expr::value(number("0.5e2"))), + SelectItem::UnnamedExpr(Expr::Value(number("0.5e2"))), ], &select.projection ); @@ -1450,7 +1232,6 @@ fn parse_select_with_date_column_name() { &Expr::Identifier(Ident { value: "date".into(), quote_style: None, - span: Span::empty(), }), expr_from_projection(only(&select.projection)), ); @@ -1468,9 +1249,9 @@ fn parse_escaped_single_quote_string_predicate_with_escape() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("salary"))), op: NotEq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("Jim's salary".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "Jim's salary".to_string() + ))), }), ast.selection, ); @@ -1494,9 +1275,9 @@ fn parse_escaped_single_quote_string_predicate_with_no_escape() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("salary"))), op: NotEq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("Jim''s salary".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "Jim''s salary".to_string() + ))), }), ast.selection, ); @@ -1509,14 +1290,11 @@ fn parse_number() { #[cfg(feature = "bigdecimal")] assert_eq!( expr, - Expr::Value((Value::Number(bigdecimal::BigDecimal::from(1), false)).with_empty_span()) + Expr::Value(Value::Number(bigdecimal::BigDecimal::from(1), false)) ); #[cfg(not(feature = "bigdecimal"))] - assert_eq!( - expr, - Expr::Value((Value::Number("1.0".into(), false)).with_empty_span()) - ); + assert_eq!(expr, Expr::Value(Value::Number("1.0".into(), false))); } #[test] @@ -1618,10 +1396,6 @@ fn pg_and_generic() -> TestedDialects { ]) } -fn ms_and_generic() -> TestedDialects { - TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})]) -} - #[test] fn parse_json_ops_without_colon() { use self::BinaryOperator::*; @@ -1654,183 +1428,6 @@ fn parse_json_ops_without_colon() { } } -#[test] -fn parse_json_object() { - let dialects = TestedDialects::new(vec![ - Box::new(MsSqlDialect {}), - Box::new(PostgreSqlDialect {}), - ]); - let select = dialects.verified_only_select("SELECT JSON_OBJECT('name' : 'value', 'type' : 1)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, .. }), - .. - }) => assert_eq!( - &[ - FunctionArg::ExprNamed { - name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("value".into())).with_empty_span() - )), - operator: FunctionArgOperator::Colon - }, - FunctionArg::ExprNamed { - name: Expr::Value((Value::SingleQuotedString("type".into())).with_empty_span()), - arg: FunctionArgExpr::Expr(Expr::value(number("1"))), - operator: FunctionArgOperator::Colon - } - ], - &args[..] - ), - _ => unreachable!(), - } - let select = dialects - .verified_only_select("SELECT JSON_OBJECT('name' : 'value', 'type' : NULL ABSENT ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. 
- }) => { - assert_eq!( - &[ - FunctionArg::ExprNamed { - name: Expr::Value( - (Value::SingleQuotedString("name".into())).with_empty_span() - ), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("value".into())).with_empty_span() - )), - operator: FunctionArgOperator::Colon - }, - FunctionArg::ExprNamed { - name: Expr::Value( - (Value::SingleQuotedString("type".into())).with_empty_span() - ), - arg: FunctionArgExpr::Expr(Expr::Value((Value::Null).with_empty_span())), - operator: FunctionArgOperator::Colon - } - ], - &args[..] - ); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = dialects.verified_only_select("SELECT JSON_OBJECT(NULL ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert!(args.is_empty()); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = dialects.verified_only_select("SELECT JSON_OBJECT(ABSENT ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert!(args.is_empty()); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = dialects.verified_only_select( - "SELECT JSON_OBJECT('name' : 'value', 'type' : JSON_ARRAY(1, 2) ABSENT ON NULL)", - ); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert_eq!( - &FunctionArg::ExprNamed { - name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("value".into())).with_empty_span() - )), - operator: FunctionArgOperator::Colon - }, - &args[0] - ); - assert!(matches!( - args[1], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::Function(_)), - operator: FunctionArgOperator::Colon - } - )); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = dialects.verified_only_select( - "SELECT JSON_OBJECT('name' : 'value', 'type' : JSON_OBJECT('type_id' : 1, 'name' : 'a') NULL ON NULL)", - ); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert_eq!( - &FunctionArg::ExprNamed { - name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("value".into())).with_empty_span() - )), - operator: FunctionArgOperator::Colon - }, - &args[0] - ); - assert!(matches!( - args[1], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::Function(_)), - operator: FunctionArgOperator::Colon - } - )); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] 
- ); - } - _ => unreachable!(), - } -} - #[test] fn parse_mod_no_spaces() { use self::Expr::*; @@ -1898,7 +1495,7 @@ fn parse_is_not_distinct_from() { #[test] fn parse_not_precedence() { // NOT has higher precedence than OR/AND, so the following must parse as (NOT true) OR true - let sql = "NOT 1 OR 1"; + let sql = "NOT true OR true"; assert_matches!( verified_expr(sql), Expr::BinaryOp { @@ -1924,9 +1521,9 @@ fn parse_not_precedence() { Expr::UnaryOp { op: UnaryOperator::Not, expr: Box::new(Expr::Between { - expr: Box::new(Expr::value(number("1"))), - low: Box::new(Expr::value(number("1"))), - high: Box::new(Expr::value(number("2"))), + expr: Box::new(Expr::Value(number("1"))), + low: Box::new(Expr::Value(number("1"))), + high: Box::new(Expr::Value(number("2"))), negated: true, }), }, @@ -1939,13 +1536,9 @@ fn parse_not_precedence() { Expr::UnaryOp { op: UnaryOperator::Not, expr: Box::new(Expr::Like { - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString("a".into()))), negated: true, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("b".into())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("b".into()))), escape_char: None, any: false, }), @@ -1960,9 +1553,7 @@ fn parse_not_precedence() { op: UnaryOperator::Not, expr: Box::new(Expr::InList { expr: Box::new(Expr::Identifier("a".into())), - list: vec![Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - )], + list: vec![Expr::Value(Value::SingleQuotedString("a".into()))], negated: true, }), }, @@ -1982,13 +1573,12 @@ fn parse_null_like() { expr: Box::new(Expr::Identifier(Ident::new("column1"))), any: false, negated: false, - pattern: Box::new(Expr::Value((Value::Null).with_empty_span())), + pattern: Box::new(Expr::Value(Value::Null)), escape_char: None, }, alias: Ident { value: "col_null".to_owned(), quote_style: None, - span: Span::empty(), }, }, select.projection[0] @@ -1996,7 +1586,7 @@ fn parse_null_like() { assert_eq!( SelectItem::ExprWithAlias { expr: Expr::Like { - expr: Box::new(Expr::Value((Value::Null).with_empty_span())), + expr: Box::new(Expr::Value(Value::Null)), any: false, negated: false, pattern: Box::new(Expr::Identifier(Ident::new("column1"))), @@ -2005,7 +1595,6 @@ fn parse_null_like() { alias: Ident { value: "null_col".to_owned(), quote_style: None, - span: Span::empty(), }, }, select.projection[1] @@ -2024,9 +1613,7 @@ fn parse_ilike() { Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), escape_char: None, any: false, }, @@ -2043,10 +1630,8 @@ fn parse_ilike() { Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), - escape_char: Some(Value::SingleQuotedString('^'.to_string())), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + escape_char: Some('^'.to_string()), any: false, }, select.selection.unwrap() @@ -2063,9 +1648,7 @@ fn parse_ilike() { Expr::IsNull(Box::new(Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), + pattern: 
Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), escape_char: None, any: false, })), @@ -2088,9 +1671,7 @@ fn parse_like() { Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), escape_char: None, any: false, }, @@ -2107,10 +1688,8 @@ fn parse_like() { Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), - escape_char: Some(Value::SingleQuotedString('^'.to_string())), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + escape_char: Some('^'.to_string()), any: false, }, select.selection.unwrap() @@ -2127,9 +1706,7 @@ fn parse_like() { Expr::IsNull(Box::new(Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), escape_char: None, any: false, })), @@ -2152,9 +1729,7 @@ fn parse_similar_to() { Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), escape_char: None, }, select.selection.unwrap() @@ -2170,27 +1745,8 @@ fn parse_similar_to() { Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), - escape_char: Some(Value::SingleQuotedString('^'.to_string())), - }, - select.selection.unwrap() - ); - - let sql = &format!( - "SELECT * FROM customers WHERE name {}SIMILAR TO '%a' ESCAPE NULL", - if negated { "NOT " } else { "" } - ); - let select = verified_only_select(sql); - assert_eq!( - Expr::SimilarTo { - expr: Box::new(Expr::Identifier(Ident::new("name"))), - negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), - escape_char: Some(Value::Null), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + escape_char: Some('^'.to_string()), }, select.selection.unwrap() ); @@ -2205,10 +1761,8 @@ fn parse_similar_to() { Expr::IsNull(Box::new(Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("%a".to_string())).with_empty_span() - )), - escape_char: Some(Value::SingleQuotedString('^'.to_string())), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + escape_char: Some('^'.to_string()), })), select.selection.unwrap() ); @@ -2229,8 +1783,8 @@ fn parse_in_list() { Expr::InList { expr: Box::new(Expr::Identifier(Ident::new("segment"))), list: vec![ - Expr::Value((Value::SingleQuotedString("HIGH".to_string())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("MED".to_string())).with_empty_span()), + Expr::Value(Value::SingleQuotedString("HIGH".to_string())), + Expr::Value(Value::SingleQuotedString("MED".to_string())), ], negated, }, @@ -2255,22 +1809,6 @@ fn parse_in_subquery() { ); } -#[test] -fn parse_in_union() { - let sql = "SELECT * FROM customers WHERE segment IN ((SELECT segm 
FROM bar) UNION (SELECT segm FROM bar2))"; - let select = verified_only_select(sql); - assert_eq!( - Expr::InSubquery { - expr: Box::new(Expr::Identifier(Ident::new("segment"))), - subquery: Box::new(verified_query( - "(SELECT segm FROM bar) UNION (SELECT segm FROM bar2)" - )), - negated: false, - }, - select.selection.unwrap() - ); -} - #[test] fn parse_in_unnest() { fn chk(negated: bool) { @@ -2377,6 +1915,44 @@ fn parse_binary_all() { ); } +#[test] +fn parse_logical_xor() { + let sql = "SELECT true XOR true, false XOR false, true XOR false, false XOR true"; + let select = verified_only_select(sql); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value(Value::Boolean(true))), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value(Value::Boolean(true))), + }), + select.projection[0] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value(Value::Boolean(false))), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value(Value::Boolean(false))), + }), + select.projection[1] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value(Value::Boolean(true))), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value(Value::Boolean(false))), + }), + select.projection[2] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value(Value::Boolean(false))), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value(Value::Boolean(true))), + }), + select.projection[3] + ); +} + #[test] fn parse_between() { fn chk(negated: bool) { @@ -2388,8 +1964,8 @@ fn parse_between() { assert_eq!( Expr::Between { expr: Box::new(Expr::Identifier(Ident::new("age"))), - low: Box::new(Expr::value(number("25"))), - high: Box::new(Expr::value(number("32"))), + low: Box::new(Expr::Value(number("25"))), + high: Box::new(Expr::Value(number("32"))), negated, }, select.selection.unwrap() @@ -2406,16 +1982,16 @@ fn parse_between_with_expr() { let select = verified_only_select(sql); assert_eq!( Expr::IsNull(Box::new(Expr::Between { - expr: Box::new(Expr::value(number("1"))), + expr: Box::new(Expr::Value(number("1"))), low: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("1"))), + left: Box::new(Expr::Value(number("1"))), op: Plus, - right: Box::new(Expr::value(number("2"))), + right: Box::new(Expr::Value(number("2"))), }), high: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("3"))), + left: Box::new(Expr::Value(number("3"))), op: Plus, - right: Box::new(Expr::value(number("4"))), + right: Box::new(Expr::Value(number("4"))), }), negated: false, })), @@ -2427,19 +2003,19 @@ fn parse_between_with_expr() { assert_eq!( Expr::BinaryOp { left: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("1"))), + left: Box::new(Expr::Value(number("1"))), op: BinaryOperator::Eq, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), op: BinaryOperator::And, right: Box::new(Expr::Between { expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("1"))), + left: Box::new(Expr::Value(number("1"))), op: BinaryOperator::Plus, right: Box::new(Expr::Identifier(Ident::new("x"))), }), - low: Box::new(Expr::value(number("1"))), - high: Box::new(Expr::value(number("2"))), + low: Box::new(Expr::Value(number("1"))), + high: Box::new(Expr::Value(number("2"))), negated: false, }), }, @@ -2454,15 +2030,13 @@ fn parse_tuples() { assert_eq!( vec![ SelectItem::UnnamedExpr(Expr::Tuple(vec![ - Expr::value(number("1")), - Expr::value(number("2")), + 
Expr::Value(number("1")), + Expr::Value(number("2")), ])), - SelectItem::UnnamedExpr(Expr::Nested(Box::new(Expr::Value( - (number("1")).with_empty_span() - )))), + SelectItem::UnnamedExpr(Expr::Nested(Box::new(Expr::Value(number("1"))))), SelectItem::UnnamedExpr(Expr::Tuple(vec![ - Expr::Value((Value::SingleQuotedString("foo".into())).with_empty_span()), - Expr::value(number("3")), + Expr::Value(Value::SingleQuotedString("foo".into())), + Expr::Value(number("3")), Expr::Identifier(Ident::new("baz")), ])), ], @@ -2492,33 +2066,27 @@ fn parse_select_order_by() { fn chk(sql: &str) { let select = verified_query(sql); assert_eq!( - OrderByKind::Expressions(vec![ + vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - options: OrderByOptions { - asc: Some(true), - nulls_first: None, - }, + asc: Some(true), + nulls_first: None, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, + asc: Some(false), + nulls_first: None, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("id")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }, - ]), - select.order_by.expect("ORDER BY expected").kind + ], + select.order_by.expect("ORDER BY expected").exprs ); } chk("SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY lname ASC, fname DESC, id"); @@ -2533,164 +2101,23 @@ fn parse_select_order_by_limit() { ORDER BY lname ASC, fname DESC LIMIT 2"; let select = verified_query(sql); assert_eq!( - OrderByKind::Expressions(vec![ + vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - options: OrderByOptions { - asc: Some(true), - nulls_first: None, - }, + asc: Some(true), + nulls_first: None, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, + asc: Some(false), + nulls_first: None, with_fill: None, }, - ]), - select.order_by.expect("ORDER BY expected").kind + ], + select.order_by.expect("ORDER BY expected").exprs ); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::value(number("2"))), - offset: None, - limit_by: vec![], - }; - assert_eq!(Some(expected_limit_clause), select.limit_clause); -} - -#[test] -fn parse_select_order_by_all() { - fn chk(sql: &str, except_order_by: OrderByKind) { - let dialects = all_dialects_where(|d| d.supports_order_by_all()); - let select = dialects.verified_query(sql); - assert_eq!( - except_order_by, - select.order_by.expect("ORDER BY expected").kind - ); - } - let test_cases = [ - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL", - OrderByKind::All(OrderByOptions { - asc: None, - nulls_first: None, - }), - ), - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL NULLS FIRST", - OrderByKind::All(OrderByOptions { - asc: None, - nulls_first: Some(true), - }), - ), - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL NULLS LAST", - OrderByKind::All(OrderByOptions { - asc: None, - nulls_first: Some(false), - }), - ), - ( - "SELECT id, fname, lname FROM customer ORDER BY ALL ASC", - OrderByKind::All(OrderByOptions { - asc: Some(true), - nulls_first: None, - }), - ), - ( - "SELECT id, fname, lname FROM customer ORDER BY ALL ASC NULLS FIRST", - OrderByKind::All(OrderByOptions { - asc: Some(true), - nulls_first: Some(true), - }), - ), - ( - "SELECT id, fname, lname FROM customer ORDER BY ALL ASC 
NULLS LAST", - OrderByKind::All(OrderByOptions { - asc: Some(true), - nulls_first: Some(false), - }), - ), - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC", - OrderByKind::All(OrderByOptions { - asc: Some(false), - nulls_first: None, - }), - ), - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC NULLS FIRST", - OrderByKind::All(OrderByOptions { - asc: Some(false), - nulls_first: Some(true), - }), - ), - ( - "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC NULLS LAST", - OrderByKind::All(OrderByOptions { - asc: Some(false), - nulls_first: Some(false), - }), - ), - ]; - - for (sql, expected_order_by) in test_cases { - chk(sql, expected_order_by); - } -} - -#[test] -fn parse_select_order_by_not_support_all() { - fn chk(sql: &str, except_order_by: OrderByKind) { - let dialects = all_dialects_where(|d| !d.supports_order_by_all()); - let select = dialects.verified_query(sql); - assert_eq!( - except_order_by, - select.order_by.expect("ORDER BY expected").kind - ); - } - let test_cases = [ - ( - "SELECT id, ALL FROM customer WHERE id < 5 ORDER BY ALL", - OrderByKind::Expressions(vec![OrderByExpr { - expr: Expr::Identifier(Ident::new("ALL")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }]), - ), - ( - "SELECT id, ALL FROM customer ORDER BY ALL ASC NULLS FIRST", - OrderByKind::Expressions(vec![OrderByExpr { - expr: Expr::Identifier(Ident::new("ALL")), - options: OrderByOptions { - asc: Some(true), - nulls_first: Some(true), - }, - with_fill: None, - }]), - ), - ( - "SELECT id, ALL FROM customer ORDER BY ALL DESC NULLS LAST", - OrderByKind::Expressions(vec![OrderByExpr { - expr: Expr::Identifier(Ident::new("ALL")), - options: OrderByOptions { - asc: Some(false), - nulls_first: Some(false), - }, - with_fill: None, - }]), - ), - ]; - - for (sql, expected_order_by) in test_cases { - chk(sql, expected_order_by); - } + assert_eq!(Some(Expr::Value(number("2"))), select.limit); } #[test] @@ -2699,32 +2126,23 @@ fn parse_select_order_by_nulls_order() { ORDER BY lname ASC NULLS FIRST, fname DESC NULLS LAST LIMIT 2"; let select = verified_query(sql); assert_eq!( - OrderByKind::Expressions(vec![ + vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - options: OrderByOptions { - asc: Some(true), - nulls_first: Some(true), - }, + asc: Some(true), + nulls_first: Some(true), with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - options: OrderByOptions { - asc: Some(false), - nulls_first: Some(false), - }, + asc: Some(false), + nulls_first: Some(false), with_fill: None, }, - ]), - select.order_by.expect("ORDER BY expeccted").kind + ], + select.order_by.expect("ORDER BY expeccted").exprs ); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::value(number("2"))), - offset: None, - limit_by: vec![], - }; - assert_eq!(Some(expected_limit_clause), select.limit_clause); + assert_eq!(Some(Expr::Value(number("2"))), select.limit); } #[test] @@ -2761,124 +2179,6 @@ fn parse_select_group_by_all() { ); } -#[test] -fn parse_group_by_with_modifier() { - let clauses = ["x", "a, b", "ALL"]; - let modifiers = [ - "WITH ROLLUP", - "WITH CUBE", - "WITH TOTALS", - "WITH ROLLUP WITH CUBE", - ]; - let expected_modifiers = [ - vec![GroupByWithModifier::Rollup], - vec![GroupByWithModifier::Cube], - vec![GroupByWithModifier::Totals], - vec![GroupByWithModifier::Rollup, GroupByWithModifier::Cube], - ]; - let dialects = all_dialects_where(|d| 
d.supports_group_by_with_modifier()); - - for clause in &clauses { - for (modifier, expected_modifier) in modifiers.iter().zip(expected_modifiers.iter()) { - let sql = format!("SELECT * FROM t GROUP BY {clause} {modifier}"); - match dialects.verified_stmt(&sql) { - Statement::Query(query) => { - let group_by = &query.body.as_select().unwrap().group_by; - if clause == &"ALL" { - assert_eq!(group_by, &GroupByExpr::All(expected_modifier.to_vec())); - } else { - assert_eq!( - group_by, - &GroupByExpr::Expressions( - clause - .split(", ") - .map(|c| Identifier(Ident::new(c))) - .collect(), - expected_modifier.to_vec() - ) - ); - } - } - _ => unreachable!(), - } - } - } - - // invalid cases - let invalid_cases = [ - "SELECT * FROM t GROUP BY x WITH", - "SELECT * FROM t GROUP BY x WITH ROLLUP CUBE", - "SELECT * FROM t GROUP BY x WITH WITH ROLLUP", - "SELECT * FROM t GROUP BY WITH ROLLUP", - ]; - for sql in invalid_cases { - dialects - .parse_sql_statements(sql) - .expect_err("Expected: one of ROLLUP or CUBE or TOTALS, found: WITH"); - } -} - -#[test] -fn parse_group_by_special_grouping_sets() { - let sql = "SELECT a, b, SUM(c) FROM tab1 GROUP BY a, b GROUPING SETS ((a, b), (a), (b), ())"; - match all_dialects().verified_stmt(sql) { - Statement::Query(query) => { - let group_by = &query.body.as_select().unwrap().group_by; - assert_eq!( - group_by, - &GroupByExpr::Expressions( - vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")) - ], - vec![GroupByWithModifier::GroupingSets(Expr::GroupingSets(vec![ - vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")) - ], - vec![Expr::Identifier(Ident::new("a")),], - vec![Expr::Identifier(Ident::new("b"))], - vec![] - ]))] - ) - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_group_by_grouping_sets_single_values() { - let sql = "SELECT a, b, SUM(c) FROM tab1 GROUP BY a, b GROUPING SETS ((a, b), a, (b), c, ())"; - let canonical = - "SELECT a, b, SUM(c) FROM tab1 GROUP BY a, b GROUPING SETS ((a, b), (a), (b), (c), ())"; - match all_dialects().one_statement_parses_to(sql, canonical) { - Statement::Query(query) => { - let group_by = &query.body.as_select().unwrap().group_by; - assert_eq!( - group_by, - &GroupByExpr::Expressions( - vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")) - ], - vec![GroupByWithModifier::GroupingSets(Expr::GroupingSets(vec![ - vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")) - ], - vec![Expr::Identifier(Ident::new("a"))], - vec![Expr::Identifier(Ident::new("b"))], - vec![Expr::Identifier(Ident::new("c"))], - vec![] - ]))] - ) - ); - } - _ => unreachable!(), - } -} - #[test] fn parse_select_having() { let sql = "SELECT foo FROM bar GROUP BY foo HAVING COUNT(*) > 1"; @@ -2886,8 +2186,7 @@ fn parse_select_having() { assert_eq!( Some(Expr::BinaryOp { left: Box::new(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("COUNT")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("COUNT")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -2900,7 +2199,7 @@ fn parse_select_having() { within_group: vec![] })), op: BinaryOperator::Gt, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), select.having ); @@ -2917,8 +2216,7 @@ fn parse_select_qualify() { assert_eq!( Some(Expr::BinaryOp { left: Box::new(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("ROW_NUMBER")]), - 
uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("ROW_NUMBER")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -2932,10 +2230,8 @@ fn parse_select_qualify() { partition_by: vec![Expr::Identifier(Ident::new("p"))], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("o")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }], window_frame: None, @@ -2943,7 +2239,7 @@ fn parse_select_qualify() { within_group: vec![] })), op: BinaryOperator::Eq, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), select.qualify ); @@ -2954,7 +2250,7 @@ fn parse_select_qualify() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("row_num"))), op: BinaryOperator::Eq, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), select.qualify ); @@ -2966,14 +2262,6 @@ fn parse_limit_accepts_all() { "SELECT id, fname, lname FROM customer WHERE id = 1 LIMIT ALL", "SELECT id, fname, lname FROM customer WHERE id = 1", ); - one_statement_parses_to( - "SELECT id, fname, lname FROM customer WHERE id = 1 LIMIT ALL OFFSET 1", - "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1", - ); - one_statement_parses_to( - "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1 LIMIT ALL", - "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1", - ); } #[test] @@ -3075,7 +2363,7 @@ fn parse_cast() { &Expr::Cast { kind: CastKind::Cast, expr: Box::new(Expr::Identifier(Ident::new("id"))), - data_type: DataType::Varbinary(Some(BinaryLength::IntegerLength { length: 50 })), + data_type: DataType::Varbinary(Some(50)), format: None, }, expr_from_projection(only(&select.projection)) @@ -3334,8 +2622,7 @@ fn parse_listagg() { assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("LISTAGG")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("LISTAGG")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: Some(DuplicateTreatment::Distinct), @@ -3344,14 +2631,14 @@ fn parse_listagg() { "dateid" )))), FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString(", ".to_owned())).with_empty_span() + Value::SingleQuotedString(", ".to_owned()) ))) ], clauses: vec![FunctionArgumentClause::OnOverflow( ListAggOnOverflow::Truncate { - filler: Some(Box::new(Expr::Value( - (Value::SingleQuotedString("%".to_string(),)).with_empty_span() - ))), + filler: Some(Box::new(Expr::Value(Value::SingleQuotedString( + "%".to_string(), + )))), with_count: false, } )], @@ -3364,24 +2651,18 @@ fn parse_listagg() { expr: Expr::Identifier(Ident { value: "id".to_string(), quote_style: None, - span: Span::empty(), }), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident { value: "username".to_string(), quote_style: None, - span: Span::empty(), }), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }, ] @@ -3493,10 +2774,7 @@ fn parse_window_function_null_treatment_arg() { let SelectItem::UnnamedExpr(Expr::Function(actual)) = &projection[i] else { unreachable!() }; - assert_eq!( - ObjectName::from(vec![Ident::new("FIRST_VALUE")]), - actual.name - ); + 
assert_eq!(ObjectName(vec![Ident::new("FIRST_VALUE")]), actual.name); let FunctionArguments::List(arg_list) = &actual.args else { panic!("expected argument list") }; @@ -3537,138 +2815,6 @@ fn parse_window_function_null_treatment_arg() { ); } -#[test] -fn test_compound_expr() { - let supported_dialects = TestedDialects::new(vec![ - Box::new(GenericDialect {}), - Box::new(DuckDbDialect {}), - Box::new(BigQueryDialect {}), - ]); - let sqls = [ - "SELECT abc[1].f1 FROM t", - "SELECT abc[1].f1.f2 FROM t", - "SELECT f1.abc[1] FROM t", - "SELECT f1.f2.abc[1] FROM t", - "SELECT f1.abc[1].f2 FROM t", - "SELECT named_struct('a', 1, 'b', 2).a", - "SELECT named_struct('a', 1, 'b', 2).a", - "SELECT make_array(1, 2, 3)[1]", - "SELECT make_array(named_struct('a', 1))[1].a", - "SELECT abc[1][-1].a.b FROM t", - "SELECT abc[1][-1].a.b[1] FROM t", - ]; - for sql in sqls { - supported_dialects.verified_stmt(sql); - } -} - -#[test] -fn test_double_value() { - let dialects = all_dialects(); - let test_cases = vec![ - gen_number_case_with_sign("0."), - gen_number_case_with_sign("0.0"), - gen_number_case_with_sign("0000."), - gen_number_case_with_sign("0000.00"), - gen_number_case_with_sign(".0"), - gen_number_case_with_sign(".00"), - gen_number_case_with_sign("0e0"), - gen_number_case_with_sign("0e+0"), - gen_number_case_with_sign("0e-0"), - gen_number_case_with_sign("0.e-0"), - gen_number_case_with_sign("0.e+0"), - gen_number_case_with_sign(".0e-0"), - gen_number_case_with_sign(".0e+0"), - gen_number_case_with_sign("00.0e+0"), - gen_number_case_with_sign("00.0e-0"), - ]; - - for (input, expected) in test_cases { - for (i, expr) in input.iter().enumerate() { - if let Statement::Query(query) = - dialects.one_statement_parses_to(&format!("SELECT {expr}"), "") - { - if let SetExpr::Select(select) = *query.body { - assert_eq!(expected[i], select.projection[0]); - } else { - panic!("Expected a SELECT statement"); - } - } else { - panic!("Expected a SELECT statement"); - } - } - } -} - -fn gen_number_case(value: &str) -> (Vec, Vec) { - let input = vec![ - value.to_string(), - format!("{} col_alias", value), - format!("{} AS col_alias", value), - ]; - let expected = vec![ - SelectItem::UnnamedExpr(Expr::value(number(value))), - SelectItem::ExprWithAlias { - expr: Expr::value(number(value)), - alias: Ident::new("col_alias"), - }, - SelectItem::ExprWithAlias { - expr: Expr::value(number(value)), - alias: Ident::new("col_alias"), - }, - ]; - (input, expected) -} - -fn gen_sign_number_case(value: &str, op: UnaryOperator) -> (Vec, Vec) { - match op { - UnaryOperator::Plus | UnaryOperator::Minus => {} - _ => panic!("Invalid sign"), - } - - let input = vec![ - format!("{}{}", op, value), - format!("{}{} col_alias", op, value), - format!("{}{} AS col_alias", op, value), - ]; - let expected = vec![ - SelectItem::UnnamedExpr(Expr::UnaryOp { - op, - expr: Box::new(Expr::value(number(value))), - }), - SelectItem::ExprWithAlias { - expr: Expr::UnaryOp { - op, - expr: Box::new(Expr::value(number(value))), - }, - alias: Ident::new("col_alias"), - }, - SelectItem::ExprWithAlias { - expr: Expr::UnaryOp { - op, - expr: Box::new(Expr::value(number(value))), - }, - alias: Ident::new("col_alias"), - }, - ]; - (input, expected) -} - -/// generate the test cases for signed and unsigned numbers -/// For example, given "0.0", the test cases will be: -/// - "0.0" -/// - "+0.0" -/// - "-0.0" -fn gen_number_case_with_sign(number: &str) -> (Vec, Vec) { - let (mut input, mut expected) = gen_number_case(number); - for op in [UnaryOperator::Plus, 
UnaryOperator::Minus] { - let (input_sign, expected_sign) = gen_sign_number_case(number, op); - input.extend(input_sign); - expected.extend(expected_sign); - } - (input, expected) -} - #[test] fn parse_negative_value() { let sql1 = "SELECT -1"; @@ -3714,7 +2860,7 @@ fn parse_create_table() { name, columns, constraints, - table_options, + with_options, if_not_exists: false, external: false, file_format: None, @@ -3731,6 +2877,7 @@ fn parse_create_table() { length: 100, unit: None, })), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -3738,7 +2885,8 @@ fn parse_create_table() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null, @@ -3746,12 +2894,14 @@ fn parse_create_table() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![], }, ColumnDef { name: "constrained".into(), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -3784,10 +2934,11 @@ fn parse_create_table() { ColumnDef { name: "ref".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::ForeignKey { - foreign_table: ObjectName::from(vec!["othertable".into()]), + foreign_table: ObjectName(vec!["othertable".into()]), referred_columns: vec!["a".into(), "b".into()], on_delete: None, on_update: None, @@ -3798,10 +2949,11 @@ fn parse_create_table() { ColumnDef { name: "ref2".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::ForeignKey { - foreign_table: ObjectName::from(vec!["othertable2".into()]), + foreign_table: ObjectName(vec!["othertable2".into()]), referred_columns: vec![], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::NoAction), @@ -3816,9 +2968,8 @@ fn parse_create_table() { vec![ TableConstraint::ForeignKey { name: Some("fkey".into()), - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable3".into()]), + foreign_table: ObjectName(vec!["othertable3".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Restrict), on_update: None, @@ -3826,9 +2977,8 @@ fn parse_create_table() { }, TableConstraint::ForeignKey { name: Some("fkey2".into()), - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::NoAction), on_update: Some(ReferentialAction::Restrict), @@ -3836,9 +2986,8 @@ fn parse_create_table() { }, TableConstraint::ForeignKey { name: None, - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::SetDefault), @@ -3846,9 +2995,8 @@ fn parse_create_table() { }, TableConstraint::ForeignKey { name: None, - index_name: None, columns: vec!["lng".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["longitude".into()], on_delete: None, 
on_update: Some(ReferentialAction::SetNull), @@ -3856,7 +3004,7 @@ fn parse_create_table() { }, ] ); - assert_eq!(table_options, CreateTableOptions::None); + assert_eq!(with_options, vec![]); } _ => unreachable!(), } @@ -3901,7 +3049,7 @@ fn parse_create_table_with_constraint_characteristics() { name, columns, constraints, - table_options, + with_options, if_not_exists: false, external: false, file_format: None, @@ -3918,6 +3066,7 @@ fn parse_create_table_with_constraint_characteristics() { length: 100, unit: None, })), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -3925,7 +3074,8 @@ fn parse_create_table_with_constraint_characteristics() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null, @@ -3933,7 +3083,8 @@ fn parse_create_table_with_constraint_characteristics() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![], }, ] @@ -3943,9 +3094,8 @@ fn parse_create_table_with_constraint_characteristics() { vec![ TableConstraint::ForeignKey { name: Some("fkey".into()), - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable3".into()]), + foreign_table: ObjectName(vec!["othertable3".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Restrict), on_update: None, @@ -3957,9 +3107,8 @@ fn parse_create_table_with_constraint_characteristics() { }, TableConstraint::ForeignKey { name: Some("fkey2".into()), - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::NoAction), on_update: Some(ReferentialAction::Restrict), @@ -3971,9 +3120,8 @@ fn parse_create_table_with_constraint_characteristics() { }, TableConstraint::ForeignKey { name: None, - index_name: None, columns: vec!["lat".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::SetDefault), @@ -3985,9 +3133,8 @@ fn parse_create_table_with_constraint_characteristics() { }, TableConstraint::ForeignKey { name: None, - index_name: None, columns: vec!["lng".into()], - foreign_table: ObjectName::from(vec!["othertable4".into()]), + foreign_table: ObjectName(vec!["othertable4".into()]), referred_columns: vec!["longitude".into()], on_delete: None, on_update: Some(ReferentialAction::SetNull), @@ -3999,7 +3146,7 @@ fn parse_create_table_with_constraint_characteristics() { }, ] ); - assert_eq!(table_options, CreateTableOptions::None); + assert_eq!(with_options, vec![]); } _ => unreachable!(), } @@ -4046,13 +3193,13 @@ fn parse_create_table_column_constraint_characteristics() { syntax }; - let sql = format!("CREATE TABLE t (a int UNIQUE {syntax})"); + let sql = format!("CREATE TABLE t (a int UNIQUE {})", syntax); let expected_clause = if syntax.is_empty() { String::new() } else { format!(" {syntax}") }; - let expected = format!("CREATE TABLE t (a INT UNIQUE{expected_clause})"); + let expected = format!("CREATE TABLE t (a INT UNIQUE{})", expected_clause); let ast = one_statement_parses_to(&sql, &expected); 
let expected_value = if deferrable.is_some() || initially.is_some() || enforced.is_some() { @@ -4072,6 +3219,7 @@ fn parse_create_table_column_constraint_characteristics() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Unique { @@ -4179,18 +3327,20 @@ fn parse_create_table_hive_array() { .. }) => { assert!(if_not_exists); - assert_eq!(name, ObjectName::from(vec!["something".into()])); + assert_eq!(name, ObjectName(vec!["something".into()])); assert_eq!( columns, vec![ ColumnDef { name: Ident::new("name"), data_type: DataType::Int(None), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("val"), data_type: DataType::Array(expected), + collation: None, options: vec![], }, ], @@ -4267,10 +3417,7 @@ fn parse_assert_message() { message: Some(message), } => { match message { - Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(s), - span: _, - }) => assert_eq!(s, "No rows in my_table"), + Expr::Value(Value::SingleQuotedString(s)) => assert_eq!(s, "No rows in my_table"), _ => unreachable!(), }; } @@ -4288,14 +3435,6 @@ fn parse_create_schema() { } _ => unreachable!(), } - - verified_stmt(r#"CREATE SCHEMA a.b.c OPTIONS(key1 = 'value1', key2 = 'value2')"#); - verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS(key1 = 'value1')"#); - verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS()"#); - verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a DEFAULT COLLATE 'und:ci' OPTIONS()"#); - verified_stmt(r#"CREATE SCHEMA a.b.c WITH (key1 = 'value1', key2 = 'value2')"#); - verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a WITH (key1 = 'value1')"#); - verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a WITH ()"#); } #[test] @@ -4373,18 +3512,19 @@ fn parse_create_table_as_table() { schema_name: None, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }); match verified_stmt(sql1) { Statement::CreateTable(CreateTable { query, name, .. }) => { - assert_eq!(name, ObjectName::from(vec![Ident::new("new_table")])); + assert_eq!(name, ObjectName(vec![Ident::new("new_table")])); assert_eq!(query.unwrap(), expected_query1); } _ => unreachable!(), @@ -4399,18 +3539,19 @@ fn parse_create_table_as_table() { schema_name: Some("schema_name".to_string()), }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }); match verified_stmt(sql2) { Statement::CreateTable(CreateTable { query, name, .. }) => { - assert_eq!(name, ObjectName::from(vec![Ident::new("new_table")])); + assert_eq!(name, ObjectName(vec![Ident::new("new_table")])); assert_eq!(query.unwrap(), expected_query2); } _ => unreachable!(), @@ -4489,22 +3630,16 @@ fn parse_create_table_with_options() { let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; match generic.verified_stmt(sql) { - Statement::CreateTable(CreateTable { table_options, .. }) => { - let with_options = match table_options { - CreateTableOptions::With(options) => options, - _ => unreachable!(), - }; + Statement::CreateTable(CreateTable { with_options, .. 
}) => { assert_eq!( vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value( - (Value::SingleQuotedString("bar".into())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("bar".into())), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::value(number("123")), + value: Expr::Value(number("123")), }, ], with_options @@ -4519,8 +3654,8 @@ fn parse_create_table_clone() { let sql = "CREATE OR REPLACE TABLE a CLONE a_tmp"; match verified_stmt(sql) { Statement::CreateTable(CreateTable { name, clone, .. }) => { - assert_eq!(ObjectName::from(vec![Ident::new("a")]), name); - assert_eq!(Some(ObjectName::from(vec![(Ident::new("a_tmp"))])), clone) + assert_eq!(ObjectName(vec![Ident::new("a")]), name); + assert_eq!(Some(ObjectName(vec![(Ident::new("a_tmp"))])), clone) } _ => unreachable!(), } @@ -4554,7 +3689,7 @@ fn parse_create_external_table() { name, columns, constraints, - table_options, + with_options, if_not_exists, external, file_format, @@ -4571,6 +3706,7 @@ fn parse_create_external_table() { length: 100, unit: None, })), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -4578,7 +3714,8 @@ fn parse_create_external_table() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null, @@ -4586,7 +3723,8 @@ fn parse_create_external_table() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double(ExactNumberInfo::None), + data_type: DataType::Double, + collation: None, options: vec![], }, ] @@ -4597,7 +3735,7 @@ fn parse_create_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(table_options, CreateTableOptions::None); + assert_eq!(with_options, vec![]); assert!(!if_not_exists); } _ => unreachable!(), @@ -4622,7 +3760,7 @@ fn parse_create_or_replace_external_table() { name, columns, constraints, - table_options, + with_options, if_not_exists, external, file_format, @@ -4639,6 +3777,7 @@ fn parse_create_or_replace_external_table() { length: 100, unit: None, })), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -4651,7 +3790,7 @@ fn parse_create_or_replace_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(table_options, CreateTableOptions::None); + assert_eq!(with_options, vec![]); assert!(!if_not_exists); assert!(or_replace); } @@ -4727,12 +3866,9 @@ fn parse_alter_table() { [SqlOption::KeyValue { key: Ident { value: "classification".to_string(), - quote_style: Some('\''), - span: Span::empty(), + quote_style: Some('\'') }, - value: Expr::Value( - (Value::SingleQuotedString("parquet".to_string())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("parquet".to_string())), }], ); } @@ -4740,65 +3876,6 @@ fn parse_alter_table() { } } -#[test] -fn parse_rename_table() { - match verified_stmt("RENAME TABLE test.test1 TO test_db.test2") { - Statement::RenameTable(rename_tables) => { - assert_eq!( - vec![RenameTable { - old_name: ObjectName::from(vec![ - Ident::new("test".to_string()), - Ident::new("test1".to_string()), - ]), - new_name: ObjectName::from(vec![ - Ident::new("test_db".to_string()), - Ident::new("test2".to_string()), - ]), - }], - rename_tables - ); - } - _ => unreachable!(), - }; - - match verified_stmt( - 
"RENAME TABLE old_table1 TO new_table1, old_table2 TO new_table2, old_table3 TO new_table3", - ) { - Statement::RenameTable(rename_tables) => { - assert_eq!( - vec![ - RenameTable { - old_name: ObjectName::from(vec![Ident::new("old_table1".to_string())]), - new_name: ObjectName::from(vec![Ident::new("new_table1".to_string())]), - }, - RenameTable { - old_name: ObjectName::from(vec![Ident::new("old_table2".to_string())]), - new_name: ObjectName::from(vec![Ident::new("new_table2".to_string())]), - }, - RenameTable { - old_name: ObjectName::from(vec![Ident::new("old_table3".to_string())]), - new_name: ObjectName::from(vec![Ident::new("new_table3".to_string())]), - } - ], - rename_tables - ); - } - _ => unreachable!(), - }; - - assert_eq!( - parse_sql_statements("RENAME TABLE old_table TO new_table a").unwrap_err(), - ParserError::ParserError("Expected: end of statement, found: a".to_string()) - ); - - assert_eq!( - parse_sql_statements("RENAME TABLE1 old_table TO new_table a").unwrap_err(), - ParserError::ParserError( - "Expected: KEYWORD `TABLE` after RENAME, found: TABLE1".to_string() - ) - ); -} - #[test] fn test_alter_table_with_on_cluster() { match all_dialects() @@ -4807,8 +3884,8 @@ fn test_alter_table_with_on_cluster() { Statement::AlterTable { name, on_cluster, .. } => { - assert_eq!(name.to_string(), "t"); - assert_eq!(on_cluster, Some(Ident::with_quote('\'', "cluster"))); + std::assert_eq!(name.to_string(), "t"); + std::assert_eq!(on_cluster, Some(Ident::with_quote('\'', "cluster"))); } _ => unreachable!(), } @@ -4819,15 +3896,15 @@ fn test_alter_table_with_on_cluster() { Statement::AlterTable { name, on_cluster, .. } => { - assert_eq!(name.to_string(), "t"); - assert_eq!(on_cluster, Some(Ident::new("cluster_name"))); + std::assert_eq!(name.to_string(), "t"); + std::assert_eq!(on_cluster, Some(Ident::new("cluster_name"))); } _ => unreachable!(), } let res = all_dialects() .parse_sql_statements("ALTER TABLE t ON CLUSTER 123 ADD CONSTRAINT bar PRIMARY KEY (baz)"); - assert_eq!( + std::assert_eq!( res.unwrap_err(), ParserError::ParserError("Expected: identifier, found: 123".to_string()) ) @@ -4876,13 +3953,11 @@ fn parse_alter_view_with_options() { vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value( - (Value::SingleQuotedString("bar".into())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("bar".into())), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::value(number("123")), + value: Expr::Value(number("123")), }, ], with_options @@ -4973,13 +4048,12 @@ fn parse_alter_table_constraints() { check_one("UNIQUE (id)"); check_one("FOREIGN KEY (foo, bar) REFERENCES AnotherTable(foo, bar)"); check_one("CHECK (end_date > start_date OR end_date IS NULL)"); - check_one("CONSTRAINT fk FOREIGN KEY (lng) REFERENCES othertable4"); fn check_one(constraint_text: &str) { match alter_table_op(verified_stmt(&format!( "ALTER TABLE tab ADD {constraint_text}" ))) { - AlterTableOperation::AddConstraint { constraint, .. 
} => { + AlterTableOperation::AddConstraint(constraint) => { assert_eq!(constraint_text, constraint.to_string()); } _ => unreachable!(), @@ -4990,36 +4064,26 @@ fn parse_alter_table_constraints() { #[test] fn parse_alter_table_drop_column() { - check_one("DROP COLUMN IF EXISTS is_active"); check_one("DROP COLUMN IF EXISTS is_active CASCADE"); - check_one("DROP COLUMN IF EXISTS is_active RESTRICT"); one_statement_parses_to( - "ALTER TABLE tab DROP COLUMN IF EXISTS is_active CASCADE", + "ALTER TABLE tab DROP IF EXISTS is_active CASCADE", "ALTER TABLE tab DROP COLUMN IF EXISTS is_active CASCADE", ); one_statement_parses_to( "ALTER TABLE tab DROP is_active CASCADE", - "ALTER TABLE tab DROP is_active CASCADE", + "ALTER TABLE tab DROP COLUMN is_active CASCADE", ); - let dialects = all_dialects_where(|d| d.supports_comma_separated_drop_column_list()); - dialects.verified_stmt("ALTER TABLE tbl DROP COLUMN c1, c2, c3"); - fn check_one(constraint_text: &str) { match alter_table_op(verified_stmt(&format!("ALTER TABLE tab {constraint_text}"))) { AlterTableOperation::DropColumn { - has_column_keyword: true, - column_names, + column_name, if_exists, - drop_behavior, + cascade, } => { - assert_eq!("is_active", column_names.first().unwrap().to_string()); + assert_eq!("is_active", column_name.to_string()); assert!(if_exists); - match drop_behavior { - None => assert!(constraint_text.ends_with(" is_active")), - Some(DropBehavior::Restrict) => assert!(constraint_text.ends_with(" RESTRICT")), - Some(DropBehavior::Cascade) => assert!(constraint_text.ends_with(" CASCADE")), - } + assert!(cascade); } _ => unreachable!(), } @@ -5045,14 +4109,14 @@ fn parse_alter_table_alter_column() { ); match alter_table_op(verified_stmt(&format!( - "{alter_stmt} ALTER COLUMN is_active SET DEFAULT 0" + "{alter_stmt} ALTER COLUMN is_active SET DEFAULT false" ))) { AlterTableOperation::AlterColumn { column_name, op } => { assert_eq!("is_active", column_name.to_string()); assert_eq!( op, AlterColumnOperation::SetDefault { - value: Expr::Value((test_utils::number("0")).with_empty_span()) + value: Expr::Value(Value::Boolean(false)) } ); } @@ -5083,21 +4147,22 @@ fn parse_alter_table_alter_column_type() { AlterColumnOperation::SetDataType { data_type: DataType::Text, using: None, - had_set: true, } ); } _ => unreachable!(), } - verified_stmt(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT")); - let dialects = all_dialects_where(|d| d.supports_alter_column_type_using()); - dialects.verified_stmt(&format!( - "{alter_stmt} ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'" - )); + let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]); - let dialects = all_dialects_except(|d| d.supports_alter_column_type_using()); - let res = dialects.parse_sql_statements(&format!( + let res = + dialect.parse_sql_statements(&format!("{alter_stmt} ALTER COLUMN is_active TYPE TEXT")); + assert_eq!( + ParserError::ParserError("Expected: SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN, found: TYPE".to_string()), + res.unwrap_err() + ); + + let res = dialect.parse_sql_statements(&format!( "{alter_stmt} ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'" )); assert_eq!( @@ -5108,29 +4173,37 @@ fn parse_alter_table_alter_column_type() { #[test] fn parse_alter_table_drop_constraint() { - check_one("DROP CONSTRAINT IF EXISTS constraint_name"); - check_one("DROP CONSTRAINT IF EXISTS constraint_name RESTRICT"); - check_one("DROP CONSTRAINT IF EXISTS constraint_name CASCADE"); - fn check_one(constraint_text: &str) { 
- match alter_table_op(verified_stmt(&format!("ALTER TABLE tab {constraint_text}"))) { - AlterTableOperation::DropConstraint { - name: constr_name, - if_exists, - drop_behavior, - } => { - assert_eq!("constraint_name", constr_name.to_string()); - assert!(if_exists); - match drop_behavior { - None => assert!(constraint_text.ends_with(" constraint_name")), - Some(DropBehavior::Restrict) => assert!(constraint_text.ends_with(" RESTRICT")), - Some(DropBehavior::Cascade) => assert!(constraint_text.ends_with(" CASCADE")), - } - } - _ => unreachable!(), + let alter_stmt = "ALTER TABLE tab"; + match alter_table_op(verified_stmt( + "ALTER TABLE tab DROP CONSTRAINT constraint_name CASCADE", + )) { + AlterTableOperation::DropConstraint { + name: constr_name, + if_exists, + cascade, + } => { + assert_eq!("constraint_name", constr_name.to_string()); + assert!(!if_exists); + assert!(cascade); } + _ => unreachable!(), + } + match alter_table_op(verified_stmt( + "ALTER TABLE tab DROP CONSTRAINT IF EXISTS constraint_name", + )) { + AlterTableOperation::DropConstraint { + name: constr_name, + if_exists, + cascade, + } => { + assert_eq!("constraint_name", constr_name.to_string()); + assert!(if_exists); + assert!(!cascade); + } + _ => unreachable!(), } - let res = parse_sql_statements("ALTER TABLE tab DROP CONSTRAINT is_active TEXT"); + let res = parse_sql_statements(&format!("{alter_stmt} DROP CONSTRAINT is_active TEXT")); assert_eq!( ParserError::ParserError("Expected: end of statement, found: TEXT".to_string()), res.unwrap_err() @@ -5175,7 +4248,7 @@ fn run_explain_analyze( expected_verbose: bool, expected_analyze: bool, expected_format: Option, - expected_options: Option>, + exepcted_options: Option>, ) { match dialect.verified_stmt(query) { Statement::Explain { @@ -5183,7 +4256,6 @@ fn run_explain_analyze( analyze, verbose, query_plan, - estimate, statement, format, options, @@ -5191,9 +4263,8 @@ fn run_explain_analyze( assert_eq!(verbose, expected_verbose); assert_eq!(analyze, expected_analyze); assert_eq!(format, expected_format); - assert_eq!(options, expected_options); + assert_eq!(options, exepcted_options); assert!(!query_plan); - assert!(!estimate); assert_eq!("SELECT sqrt(id) FROM foo", statement.to_string()); } _ => panic!("Unexpected Statement, must be Explain"), @@ -5338,63 +4409,30 @@ fn parse_explain_query_plan() { ); } -#[test] -fn parse_explain_estimate() { - let statement = all_dialects().verified_stmt("EXPLAIN ESTIMATE SELECT sqrt(id) FROM foo"); - - match &statement { - Statement::Explain { - query_plan, - estimate, - analyze, - verbose, - statement, - .. 
- } => { - assert!(estimate); - assert!(!query_plan); - assert!(!analyze); - assert!(!verbose); - assert_eq!("SELECT sqrt(id) FROM foo", statement.to_string()); - } - _ => unreachable!(), - } - - assert_eq!( - "EXPLAIN ESTIMATE SELECT sqrt(id) FROM foo", - statement.to_string() - ); -} - #[test] fn parse_named_argument_function() { - let dialects = all_dialects_where(|d| { - d.supports_named_fn_args_with_rarrow_operator() - && !d.supports_named_fn_args_with_expr_name() - }); let sql = "SELECT FUN(a => '1', b => '2') FROM foo"; - let select = dialects.verified_only_select(sql); + let select = verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("FUN")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("FUN")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("1".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "1".to_owned() + ))), operator: FunctionArgOperator::RightArrow }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("2".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "2".to_owned() + ))), operator: FunctionArgOperator::RightArrow }, ], @@ -5417,24 +4455,23 @@ fn parse_named_argument_function_with_eq_operator() { .verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("FUN")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("FUN")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("1".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "1".to_owned() + ))), operator: FunctionArgOperator::Equals }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("2".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "2".to_owned() + ))), operator: FunctionArgOperator::Equals }, ], @@ -5458,7 +4495,7 @@ fn parse_named_argument_function_with_eq_operator() { [Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("bar"))), op: BinaryOperator::Eq, - right: Box::new(Expr::value(number("42"))), + right: Box::new(Expr::Value(number("42"))), }] ), ); @@ -5492,8 +4529,7 @@ fn parse_window_functions() { assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("row_number")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("row_number")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -5507,10 +4543,8 @@ fn parse_window_functions() { partition_by: vec![], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("dt")), - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, + asc: Some(false), + nulls_first: None, with_fill: None, }], window_frame: None, @@ -5550,8 +4584,7 @@ fn parse_named_window_functions() { WINDOW w AS (PARTITION BY x), win AS (ORDER BY y)"; supported_dialects.verified_stmt(sql); - 
let select = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))) - .verified_only_select(sql); + let select = verified_only_select(sql); const EXPECTED_PROJ_QTY: usize = 2; assert_eq!(EXPECTED_PROJ_QTY, select.projection.len()); @@ -5581,7 +4614,6 @@ fn parse_named_window_functions() { #[test] fn parse_window_clause() { - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))); let sql = "SELECT * \ FROM mytable \ WINDOW \ @@ -5594,14 +4626,10 @@ fn parse_window_clause() { window7 AS (window1 ROWS UNBOUNDED PRECEDING), \ window8 AS (window1 PARTITION BY a ORDER BY b ROWS UNBOUNDED PRECEDING) \ ORDER BY C3"; - dialects.verified_only_select(sql); + verified_only_select(sql); let sql = "SELECT * from mytable WINDOW window1 AS window2"; - let dialects = all_dialects_except(|d| { - d.is::() - || d.is::() - || d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d)) - }); + let dialects = all_dialects_except(|d| d.is::() || d.is::()); let res = dialects.parse_sql_statements(sql); assert_eq!( ParserError::ParserError("Expected: (, found: window2".to_string()), @@ -5611,7 +4639,6 @@ fn parse_window_clause() { #[test] fn test_parse_named_window() { - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d))); let sql = "SELECT \ MIN(c12) OVER window1 AS min1, \ MAX(c12) OVER window2 AS max1 \ @@ -5619,21 +4646,18 @@ fn test_parse_named_window() { WINDOW window1 AS (ORDER BY C12), \ window2 AS (PARTITION BY C11) \ ORDER BY C3"; - let actual_select_only = dialects.verified_only_select(sql); + let actual_select_only = verified_only_select(sql); let expected = Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![ SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "MIN".to_string(), quote_style: None, - span: Span::empty(), }]), - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -5641,7 +4665,6 @@ fn test_parse_named_window() { Expr::Identifier(Ident { value: "c12".to_string(), quote_style: None, - span: Span::empty(), }), ))], clauses: vec![], @@ -5651,24 +4674,20 @@ fn test_parse_named_window() { over: Some(WindowType::NamedWindow(Ident { value: "window1".to_string(), quote_style: None, - span: Span::empty(), })), within_group: vec![], }), alias: Ident { value: "min1".to_string(), quote_style: None, - span: Span::empty(), }, }, SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "MAX".to_string(), quote_style: None, - span: Span::empty(), }]), - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -5676,7 +4695,6 @@ fn test_parse_named_window() { Expr::Identifier(Ident { value: "c12".to_string(), quote_style: None, - span: Span::empty(), }), ))], clauses: vec![], @@ -5686,25 +4704,29 @@ fn test_parse_named_window() { over: Some(WindowType::NamedWindow(Ident { value: "window2".to_string(), quote_style: None, - span: Span::empty(), })), within_group: vec![], }), alias: Ident { value: "max1".to_string(), quote_style: None, - span: Span::empty(), }, }, ], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: 
"aggregate_test_100".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "aggregate_test_100".to_string(), + quote_style: None, + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -5720,7 +4742,6 @@ fn test_parse_named_window() { Ident { value: "window1".to_string(), quote_style: None, - span: Span::empty(), }, NamedWindowExpr::WindowSpec(WindowSpec { window_name: None, @@ -5729,12 +4750,9 @@ fn test_parse_named_window() { expr: Expr::Identifier(Ident { value: "C12".to_string(), quote_style: None, - span: Span::empty(), }), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }], window_frame: None, @@ -5744,14 +4762,12 @@ fn test_parse_named_window() { Ident { value: "window2".to_string(), quote_style: None, - span: Span::empty(), }, NamedWindowExpr::WindowSpec(WindowSpec { window_name: None, partition_by: vec![Expr::Identifier(Ident { value: "C11".to_string(), quote_style: None, - span: Span::empty(), })], order_by: vec![], window_frame: None, @@ -5762,17 +4778,12 @@ fn test_parse_named_window() { window_before_qualify: true, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }; assert_eq!(actual_select_only, expected); } #[test] fn parse_window_and_qualify_clause() { - let dialects = all_dialects_except(|d| { - d.is_table_alias(&Keyword::WINDOW, &mut Parser::new(d)) - || d.is_table_alias(&Keyword::QUALIFY, &mut Parser::new(d)) - }); let sql = "SELECT \ MIN(c12) OVER window1 AS min1 \ FROM aggregate_test_100 \ @@ -5780,7 +4791,7 @@ fn parse_window_and_qualify_clause() { WINDOW window1 AS (ORDER BY C12), \ window2 AS (PARTITION BY C11) \ ORDER BY C3"; - dialects.verified_only_select(sql); + verified_only_select(sql); let sql = "SELECT \ MIN(c12) OVER window1 AS min1 \ @@ -5789,7 +4800,7 @@ fn parse_window_and_qualify_clause() { window2 AS (PARTITION BY C11) \ QUALIFY ROW_NUMBER() OVER my_window \ ORDER BY C3"; - dialects.verified_only_select(sql); + verified_only_select(sql); } #[test] @@ -5820,14 +4831,14 @@ fn parse_literal_integer() { let select = verified_only_select(sql); assert_eq!(3, select.projection.len()); assert_eq!( - &Expr::value(number("1")), + &Expr::Value(number("1")), expr_from_projection(&select.projection[0]), ); // negative literal is parsed as a - and expr assert_eq!( &UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::value(number("10"))) + expr: Box::new(Expr::Value(number("10"))) }, expr_from_projection(&select.projection[1]), ); @@ -5835,7 +4846,7 @@ fn parse_literal_integer() { assert_eq!( &UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::value(number("20"))) + expr: Box::new(Expr::Value(number("20"))) }, expr_from_projection(&select.projection[2]), ) @@ -5849,11 +4860,11 @@ fn parse_literal_decimal() { let select = verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( - &Expr::value(number("0.300000000000000004")), + &Expr::Value(number("0.300000000000000004")), expr_from_projection(&select.projection[0]), ); assert_eq!( - &Expr::value(number("9007199254740993.0")), + &Expr::Value(number("9007199254740993.0")), expr_from_projection(&select.projection[1]), ) } @@ -5864,17 +4875,15 @@ fn parse_literal_string() { let select = verified_only_select(sql); assert_eq!(3, select.projection.len()); assert_eq!( - 
&Expr::Value((Value::SingleQuotedString("one".to_string())).with_empty_span()), + &Expr::Value(Value::SingleQuotedString("one".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value( - (Value::NationalStringLiteral("national string".to_string())).with_empty_span() - ), + &Expr::Value(Value::NationalStringLiteral("national string".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value((Value::HexStringLiteral("deadBEEF".to_string())).with_empty_span()), + &Expr::Value(Value::HexStringLiteral("deadBEEF".to_string())), expr_from_projection(&select.projection[2]) ); @@ -5889,10 +4898,7 @@ fn parse_literal_date() { assert_eq!( &Expr::TypedString { data_type: DataType::Date, - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01".into()), - span: Span::empty(), - } + value: "1999-01-01".into(), }, expr_from_projection(only(&select.projection)), ); @@ -5905,10 +4911,7 @@ fn parse_literal_time() { assert_eq!( &Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("01:23:34".into()), - span: Span::empty(), - }, + value: "01:23:34".into(), }, expr_from_projection(only(&select.projection)), ); @@ -5921,10 +4924,7 @@ fn parse_literal_datetime() { assert_eq!( &Expr::TypedString { data_type: DataType::Datetime(None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()), - span: Span::empty(), - }, + value: "1999-01-01 01:23:34.45".into(), }, expr_from_projection(only(&select.projection)), ); @@ -5937,10 +4937,7 @@ fn parse_literal_timestamp_without_time_zone() { assert_eq!( &Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01 01:23:34".into()), - span: Span::empty(), - }, + value: "1999-01-01 01:23:34".into(), }, expr_from_projection(only(&select.projection)), ); @@ -5955,10 +4952,7 @@ fn parse_literal_timestamp_with_time_zone() { assert_eq!( &Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::Tz), - value: ValueWithSpan { - value: Value::SingleQuotedString("1999-01-01 01:23:34Z".into()), - span: Span::empty(), - }, + value: "1999-01-01 01:23:34Z".into(), }, expr_from_projection(only(&select.projection)), ); @@ -5974,9 +4968,7 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1-1"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("1-1")))), leading_field: Some(DateTimeField::Year), leading_precision: None, last_field: Some(DateTimeField::Month), @@ -5989,9 +4981,9 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("01:01.01"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "01:01.01" + )))), leading_field: Some(DateTimeField::Minute), leading_precision: Some(5), last_field: Some(DateTimeField::Second), @@ -6004,9 +4996,7 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("1")))), leading_field: 
Some(DateTimeField::Second), leading_precision: Some(5), last_field: None, @@ -6019,9 +5009,7 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("10"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("10")))), leading_field: Some(DateTimeField::Hour), leading_precision: None, last_field: None, @@ -6034,7 +5022,7 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::value(number("5"))), + value: Box::new(Expr::Value(number("5"))), leading_field: Some(DateTimeField::Day), leading_precision: None, last_field: None, @@ -6043,26 +5031,11 @@ fn parse_interval_all() { expr_from_projection(only(&select.projection)), ); - let sql = "SELECT INTERVAL 5 DAYS"; - let select = verified_only_select(sql); - assert_eq!( - &Expr::Interval(Interval { - value: Box::new(Expr::value(number("5"))), - leading_field: Some(DateTimeField::Days), - leading_precision: None, - last_field: None, - fractional_seconds_precision: None, - }), - expr_from_projection(only(&select.projection)), - ); - let sql = "SELECT INTERVAL '10' HOUR (1)"; let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("10"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("10")))), leading_field: Some(DateTimeField::Hour), leading_precision: Some(1), last_field: None, @@ -6085,18 +5058,10 @@ fn parse_interval_all() { verified_only_select("SELECT INTERVAL '1' YEAR"); verified_only_select("SELECT INTERVAL '1' MONTH"); - verified_only_select("SELECT INTERVAL '1' WEEK"); verified_only_select("SELECT INTERVAL '1' DAY"); verified_only_select("SELECT INTERVAL '1' HOUR"); verified_only_select("SELECT INTERVAL '1' MINUTE"); verified_only_select("SELECT INTERVAL '1' SECOND"); - verified_only_select("SELECT INTERVAL '1' YEARS"); - verified_only_select("SELECT INTERVAL '1' MONTHS"); - verified_only_select("SELECT INTERVAL '1' WEEKS"); - verified_only_select("SELECT INTERVAL '1' DAYS"); - verified_only_select("SELECT INTERVAL '1' HOURS"); - verified_only_select("SELECT INTERVAL '1' MINUTES"); - verified_only_select("SELECT INTERVAL '1' SECONDS"); verified_only_select("SELECT INTERVAL '1' YEAR TO MONTH"); verified_only_select("SELECT INTERVAL '1' DAY TO HOUR"); verified_only_select("SELECT INTERVAL '1' DAY TO MINUTE"); @@ -6106,21 +5071,10 @@ fn parse_interval_all() { verified_only_select("SELECT INTERVAL '1' MINUTE TO SECOND"); verified_only_select("SELECT INTERVAL 1 YEAR"); verified_only_select("SELECT INTERVAL 1 MONTH"); - verified_only_select("SELECT INTERVAL 1 WEEK"); verified_only_select("SELECT INTERVAL 1 DAY"); verified_only_select("SELECT INTERVAL 1 HOUR"); verified_only_select("SELECT INTERVAL 1 MINUTE"); verified_only_select("SELECT INTERVAL 1 SECOND"); - verified_only_select("SELECT INTERVAL 1 YEARS"); - verified_only_select("SELECT INTERVAL 1 MONTHS"); - verified_only_select("SELECT INTERVAL 1 WEEKS"); - verified_only_select("SELECT INTERVAL 1 DAYS"); - verified_only_select("SELECT INTERVAL 1 HOURS"); - verified_only_select("SELECT INTERVAL 1 MINUTES"); - verified_only_select("SELECT INTERVAL 1 SECONDS"); - verified_only_select( - "SELECT '2 years 15 months 100 weeks 99 hours 123456789 milliseconds'::INTERVAL", - ); } #[test] @@ -6131,9 +5085,9 @@ fn 
parse_interval_dont_require_unit() { let select = dialects.verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "1 DAY" + )))), leading_field: None, leading_precision: None, last_field: None, @@ -6152,6 +5106,7 @@ fn parse_interval_dont_require_unit() { #[test] fn parse_interval_require_unit() { let dialects = all_dialects_where(|d| d.require_interval_qualifier()); + let sql = "SELECT INTERVAL '1 DAY'"; let err = dialects.parse_sql_statements(sql).unwrap_err(); assert_eq!( @@ -6170,9 +5125,9 @@ fn parse_interval_require_qualifier() { expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("1"))), + left: Box::new(Expr::Value(number("1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -6187,13 +5142,9 @@ fn parse_interval_require_qualifier() { expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::SingleQuotedString("1".to_string())).with_empty_span() - )), + left: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("1".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -6209,18 +5160,12 @@ fn parse_interval_require_qualifier() { &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { left: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::SingleQuotedString("1".to_string())).with_empty_span() - )), + left: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("2".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("2".to_string()))), }), op: BinaryOperator::Minus, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("3".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("3".to_string()))), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -6239,9 +5184,9 @@ fn parse_interval_disallow_interval_expr() { assert_eq!( expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "1 DAY" + )))), leading_field: None, leading_precision: None, last_field: None, @@ -6262,9 +5207,9 @@ fn parse_interval_disallow_interval_expr() { expr_from_projection(only(&select.projection)), &Expr::BinaryOp { left: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "1 DAY" + )))), leading_field: None, leading_precision: None, last_field: None, @@ -6272,9 +5217,9 @@ fn parse_interval_disallow_interval_expr() { })), op: BinaryOperator::Gt, right: Box::new(Expr::Interval(Interval { - value: 
Box::new(Expr::Value( - (Value::SingleQuotedString(String::from("1 SECOND"))).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( + "1 SECOND" + )))), leading_field: None, leading_precision: None, last_field: None, @@ -6292,9 +5237,9 @@ fn interval_disallow_interval_expr_gt() { expr, Expr::BinaryOp { left: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString("1 second".to_string())).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "1 second".to_string() + ))), leading_field: None, leading_precision: None, last_field: None, @@ -6304,7 +5249,6 @@ fn interval_disallow_interval_expr_gt() { right: Box::new(Expr::Identifier(Ident { value: "x".to_string(), quote_style: None, - span: Span::empty(), })), } ) @@ -6319,9 +5263,9 @@ fn interval_disallow_interval_expr_double_colon() { Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString("1 second".to_string())).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "1 second".to_string() + ))), leading_field: None, leading_precision: None, last_field: None, @@ -6345,23 +5289,27 @@ fn parse_interval_and_or_xor() { let expected_ast = vec![Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![UnnamedExpr(Expr::Identifier(Ident { value: "col".to_string(), quote_style: None, - span: Span::empty(), }))], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "test".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "test".to_string(), + quote_style: None, + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -6371,20 +5319,18 @@ fn parse_interval_and_or_xor() { left: Box::new(Expr::Identifier(Ident { value: "d3_date".to_string(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::Gt, right: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "d1_date".to_string(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString("5 days".to_string())).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "5 days".to_string(), + ))), leading_field: None, leading_precision: None, last_field: None, @@ -6397,20 +5343,18 @@ fn parse_interval_and_or_xor() { left: Box::new(Expr::Identifier(Ident { value: "d2_date".to_string(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::Gt, right: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "d1_date".to_string(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - (Value::SingleQuotedString("3 days".to_string())).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "3 days".to_string(), + ))), leading_field: None, leading_precision: None, last_field: None, @@ -6429,16 +5373,16 @@ fn parse_interval_and_or_xor() { 
window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }))]; assert_eq!(actual_ast, expected_ast); @@ -6464,15 +5408,15 @@ fn parse_interval_and_or_xor() { #[test] fn parse_at_timezone() { - let zero = Expr::value(number("0")); + let zero = Expr::Value(number("0")); let sql = "SELECT FROM_UNIXTIME(0) AT TIME ZONE 'UTC-06:00' FROM t"; let select = verified_only_select(sql); assert_eq!( &Expr::AtTimeZone { timestamp: Box::new(call("FROM_UNIXTIME", [zero.clone()])), - time_zone: Box::new(Expr::Value( - (Value::SingleQuotedString("UTC-06:00".to_string())).with_empty_span() - )), + time_zone: Box::new(Expr::Value(Value::SingleQuotedString( + "UTC-06:00".to_string() + ))), }, expr_from_projection(only(&select.projection)), ); @@ -6486,19 +5430,16 @@ fn parse_at_timezone() { [ Expr::AtTimeZone { timestamp: Box::new(call("FROM_UNIXTIME", [zero])), - time_zone: Box::new(Expr::Value( - (Value::SingleQuotedString("UTC-06:00".to_string())).with_empty_span() - )), + time_zone: Box::new(Expr::Value(Value::SingleQuotedString( + "UTC-06:00".to_string() + ))), }, - Expr::Value( - (Value::SingleQuotedString("%Y-%m-%dT%H".to_string())).with_empty_span() - ) + Expr::Value(Value::SingleQuotedString("%Y-%m-%dT%H".to_string()),) ] ), alias: Ident { value: "hour".to_string(), quote_style: Some('"'), - span: Span::empty(), }, }, only(&select.projection), @@ -6531,9 +5472,7 @@ fn parse_json_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::JSON, - value: ValueWithSpan { - value: Value::SingleQuotedString( - r#"{ + value: r#"{ "id": 10, "type": "fruit", "name": "apple", @@ -6553,35 +5492,12 @@ fn parse_json_keyword() { ] } }"# - .to_string() - ), - span: Span::empty(), - } + .into() }, expr_from_projection(only(&select.projection)), ); } -#[test] -fn parse_typed_strings() { - let expr = verified_expr(r#"JSON '{"foo":"bar"}'"#); - assert_eq!( - Expr::TypedString { - data_type: DataType::JSON, - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"{"foo":"bar"}"#.into()), - span: Span::empty(), - } - }, - expr - ); - - if let Expr::TypedString { data_type, value } = expr { - assert_eq!(DataType::JSON, data_type); - assert_eq!(r#"{"foo":"bar"}"#, value.into_string().unwrap()); - } -} - #[test] fn parse_bignumeric_keyword() { let sql = r#"SELECT BIGNUMERIC '0'"#; @@ -6589,10 +5505,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"0"#.into()), - span: Span::empty(), - } + value: r#"0"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6603,10 +5516,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"123456"#.into()), - span: Span::empty(), - } + value: r#"123456"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6617,10 +5527,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"-3.14"#.into()), - span: Span::empty(), - } + value: r#"-3.14"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6631,10 
+5538,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"-0.54321"#.into()), - span: Span::empty(), - } + value: r#"-0.54321"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6645,10 +5549,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"1.23456e05"#.into()), - span: Span::empty(), - } + value: r#"1.23456e05"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6659,10 +5560,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString(r#"-9.876e-3"#.into()), - span: Span::empty(), - } + value: r#"-9.876e-3"#.into() }, expr_from_projection(only(&select.projection)), ); @@ -6690,9 +5588,7 @@ fn parse_table_function() { assert_eq!( call( "FUN", - [Expr::Value( - (Value::SingleQuotedString("1".to_owned())).with_empty_span() - )], + [Expr::Value(Value::SingleQuotedString("1".to_owned()))], ), expr ); @@ -6714,40 +5610,6 @@ fn parse_table_function() { ); } -#[test] -fn parse_select_with_alias_and_column_defs() { - let sql = r#"SELECT * FROM jsonb_to_record('{"a": "x", "b": 2}'::JSONB) AS x (a TEXT, b INT)"#; - let select = verified_only_select(sql); - - match only(&select.from) { - TableWithJoins { - relation: TableFactor::Table { - alias: Some(alias), .. - }, - .. - } => { - assert_eq!(alias.name.value, "x"); - assert_eq!( - alias.columns, - vec![ - TableAliasColumnDef { - name: Ident::new("a"), - data_type: Some(DataType::Text), - }, - TableAliasColumnDef { - name: Ident::new("b"), - data_type: Some(DataType::Int(None)), - }, - ] - ); - } - _ => unreachable!( - "Expecting only TableWithJoins with TableFactor::Table, got {:#?}", - select.from - ), - } -} - #[test] fn parse_unnest() { let sql = "SELECT UNNEST(make_array(1, 2, 3))"; @@ -6875,9 +5737,9 @@ fn parse_unnest_in_from_clause() { array_exprs: vec![call( "make_array", [ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ], )], with_offset: false, @@ -6901,14 +5763,14 @@ fn parse_unnest_in_from_clause() { call( "make_array", [ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ], ), call( "make_array", - [Expr::value(number("5")), Expr::value(number("6"))], + [Expr::Value(number("5")), Expr::Value(number("6"))], ), ], with_offset: false, @@ -6951,32 +5813,26 @@ fn parse_searched_case_expr() { let select = verified_only_select(sql); assert_eq!( &Case { - case_token: AttachedToken::empty(), - end_token: AttachedToken::empty(), operand: None, conditions: vec![ - CaseWhen { - condition: IsNull(Box::new(Identifier(Ident::new("bar")))), - result: Expr::value(Value::SingleQuotedString("null".to_string())), + IsNull(Box::new(Identifier(Ident::new("bar")))), + BinaryOp { + left: Box::new(Identifier(Ident::new("bar"))), + op: Eq, + right: Box::new(Expr::Value(number("0"))), }, - CaseWhen { - condition: BinaryOp { - left: Box::new(Identifier(Ident::new("bar"))), - op: Eq, - right: Box::new(Expr::value(number("0"))), - }, - result: Expr::value(Value::SingleQuotedString("=0".to_string())), - }, - 
CaseWhen { - condition: BinaryOp { - left: Box::new(Identifier(Ident::new("bar"))), - op: GtEq, - right: Box::new(Expr::value(number("0"))), - }, - result: Expr::value(Value::SingleQuotedString(">=0".to_string())), + BinaryOp { + left: Box::new(Identifier(Ident::new("bar"))), + op: GtEq, + right: Box::new(Expr::Value(number("0"))), }, ], - else_result: Some(Box::new(Expr::value(Value::SingleQuotedString( + results: vec![ + Expr::Value(Value::SingleQuotedString("null".to_string())), + Expr::Value(Value::SingleQuotedString("=0".to_string())), + Expr::Value(Value::SingleQuotedString(">=0".to_string())), + ], + else_result: Some(Box::new(Expr::Value(Value::SingleQuotedString( "<0".to_string() )))), }, @@ -6992,14 +5848,10 @@ fn parse_simple_case_expr() { use self::Expr::{Case, Identifier}; assert_eq!( &Case { - case_token: AttachedToken::empty(), - end_token: AttachedToken::empty(), operand: Some(Box::new(Identifier(Ident::new("foo")))), - conditions: vec![CaseWhen { - condition: Expr::value(number("1")), - result: Expr::value(Value::SingleQuotedString("Y".to_string())), - }], - else_result: Some(Box::new(Expr::value(Value::SingleQuotedString( + conditions: vec![Expr::Value(number("1"))], + results: vec![Expr::Value(Value::SingleQuotedString("Y".to_string()))], + else_result: Some(Box::new(Expr::Value(Value::SingleQuotedString( "N".to_string() )))), }, @@ -7026,11 +5878,27 @@ fn parse_implicit_join() { assert_eq!( vec![ TableWithJoins { - relation: table_from_name(ObjectName::from(vec!["t1".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t1".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }, TableWithJoins { - relation: table_from_name(ObjectName::from(vec!["t2".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t2".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }, ], @@ -7042,19 +5910,51 @@ fn parse_implicit_join() { assert_eq!( vec![ TableWithJoins { - relation: table_from_name(ObjectName::from(vec!["t1a".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t1a".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![Join { - relation: table_from_name(ObjectName::from(vec!["t1b".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t1b".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::Natural), + join_operator: JoinOperator::Inner(JoinConstraint::Natural), }], }, TableWithJoins { - relation: table_from_name(ObjectName::from(vec!["t2a".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t2a".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![Join { - relation: table_from_name(ObjectName::from(vec!["t2b".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t2b".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::Natural), + join_operator: JoinOperator::Inner(JoinConstraint::Natural), }], }, ], @@ -7068,7 +5968,15 @@ fn 
parse_cross_join() { let select = verified_only_select(sql); assert_eq!( Join { - relation: table_from_name(ObjectName::from(vec![Ident::new("t2")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("t2")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, global: false, join_operator: JoinOperator::CrossJoin, }, @@ -7086,16 +5994,13 @@ fn parse_joins_on() { ) -> Join { Join { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new(relation.into())]), + name: ObjectName(vec![Ident::new(relation.into())]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, global, join_operator: f(JoinConstraint::On(Expr::BinaryOp { @@ -7112,7 +6017,7 @@ fn parse_joins_on() { "t2", table_alias("foo"), false, - JoinOperator::Join, + JoinOperator::Inner, )] ); one_statement_parses_to( @@ -7122,18 +6027,10 @@ fn parse_joins_on() { // Test parsing of different join operators assert_eq!( only(&verified_only_select("SELECT * FROM t1 JOIN t2 ON c1 = c2").from).joins, - vec![join_with_constraint("t2", None, false, JoinOperator::Join)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 INNER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint("t2", None, false, JoinOperator::Inner)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT JOIN t2 ON c1 = c2").from).joins, - vec![join_with_constraint("t2", None, false, JoinOperator::Left)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 LEFT OUTER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( "t2", None, @@ -7143,10 +6040,6 @@ fn parse_joins_on() { ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 RIGHT JOIN t2 ON c1 = c2").from).joins, - vec![join_with_constraint("t2", None, false, JoinOperator::Right)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 RIGHT OUTER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( "t2", None, @@ -7154,10 +6047,6 @@ fn parse_joins_on() { JoinOperator::RightOuter )] ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 SEMI JOIN t2 ON c1 = c2").from).joins, - vec![join_with_constraint("t2", None, false, JoinOperator::Semi)] - ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT SEMI JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( @@ -7176,10 +6065,6 @@ fn parse_joins_on() { JoinOperator::RightSemi )] ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 ANTI JOIN t2 ON c1 = c2").from).joins, - vec![join_with_constraint("t2", None, false, JoinOperator::Anti)] - ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT ANTI JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( @@ -7228,21 +6113,16 @@ fn parse_joins_using() { ) -> Join { Join { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new(relation.into())]), + name: ObjectName(vec![Ident::new(relation.into())]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, global: false, - join_operator: f(JoinConstraint::Using(vec![ObjectName::from(vec![ - "c1".into() - ])])), + join_operator: f(JoinConstraint::Using(vec!["c1".into()])), } } // Test parsing of aliases @@ -7251,7 +6131,7 @@ fn parse_joins_using() { vec![join_with_constraint( "t2", table_alias("foo"), - JoinOperator::Join, + 
JoinOperator::Inner, )] ); one_statement_parses_to( @@ -7261,32 +6141,16 @@ fn parse_joins_using() { // Test parsing of different join operators assert_eq!( only(&verified_only_select("SELECT * FROM t1 JOIN t2 USING(c1)").from).joins, - vec![join_with_constraint("t2", None, JoinOperator::Join)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 INNER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::Inner)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT JOIN t2 USING(c1)").from).joins, - vec![join_with_constraint("t2", None, JoinOperator::Left)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 LEFT OUTER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftOuter)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 RIGHT JOIN t2 USING(c1)").from).joins, - vec![join_with_constraint("t2", None, JoinOperator::Right)] - ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 RIGHT OUTER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::RightOuter)] ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 SEMI JOIN t2 USING(c1)").from).joins, - vec![join_with_constraint("t2", None, JoinOperator::Semi)] - ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT SEMI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftSemi)] @@ -7295,10 +6159,6 @@ fn parse_joins_using() { only(&verified_only_select("SELECT * FROM t1 RIGHT SEMI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::RightSemi)] ); - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 ANTI JOIN t2 USING(c1)").from).joins, - vec![join_with_constraint("t2", None, JoinOperator::Anti)] - ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT ANTI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftAnti)] @@ -7311,7 +6171,6 @@ fn parse_joins_using() { only(&verified_only_select("SELECT * FROM t1 FULL JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::FullOuter)] ); - verified_stmt("SELECT * FROM tbl1 AS t1 JOIN tbl2 AS t2 USING(t2.col1)"); } #[test] @@ -7319,55 +6178,33 @@ fn parse_natural_join() { fn natural_join(f: impl Fn(JoinConstraint) -> JoinOperator, alias: Option) -> Join { Join { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t2")]), + name: ObjectName(vec![Ident::new("t2")]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, global: false, join_operator: f(JoinConstraint::Natural), } } - // unspecified join + // if not specified, inner join as default assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL JOIN t2").from).joins, - vec![natural_join(JoinOperator::Join, None)] - ); - - // inner join explicitly - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 NATURAL INNER JOIN t2").from).joins, vec![natural_join(JoinOperator::Inner, None)] ); - // left join explicitly assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL LEFT JOIN t2").from).joins, - vec![natural_join(JoinOperator::Left, None)] - ); - - // left outer join explicitly - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 NATURAL LEFT OUTER JOIN t2").from).joins, vec![natural_join(JoinOperator::LeftOuter, None)] ); // right join explicitly assert_eq!( 
only(&verified_only_select("SELECT * FROM t1 NATURAL RIGHT JOIN t2").from).joins, - vec![natural_join(JoinOperator::Right, None)] - ); - - // right outer join explicitly - assert_eq!( - only(&verified_only_select("SELECT * FROM t1 NATURAL RIGHT OUTER JOIN t2").from).joins, vec![natural_join(JoinOperator::RightOuter, None)] ); @@ -7380,7 +6217,7 @@ fn parse_natural_join() { // natural join another table with alias assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL JOIN t2 AS t3").from).joins, - vec![natural_join(JoinOperator::Join, table_alias("t3"))] + vec![natural_join(JoinOperator::Inner, table_alias("t3"))] ); let sql = "SELECT * FROM t1 natural"; @@ -7447,19 +6284,24 @@ fn parse_join_nesting() { #[test] fn parse_join_syntax_variants() { - verified_stmt("SELECT c1 FROM t1 JOIN t2 USING(c1)"); - verified_stmt("SELECT c1 FROM t1 INNER JOIN t2 USING(c1)"); - verified_stmt("SELECT c1 FROM t1 LEFT JOIN t2 USING(c1)"); - verified_stmt("SELECT c1 FROM t1 LEFT OUTER JOIN t2 USING(c1)"); - verified_stmt("SELECT c1 FROM t1 RIGHT JOIN t2 USING(c1)"); - verified_stmt("SELECT c1 FROM t1 RIGHT OUTER JOIN t2 USING(c1)"); + one_statement_parses_to( + "SELECT c1 FROM t1 INNER JOIN t2 USING(c1)", + "SELECT c1 FROM t1 JOIN t2 USING(c1)", + ); + one_statement_parses_to( + "SELECT c1 FROM t1 LEFT OUTER JOIN t2 USING(c1)", + "SELECT c1 FROM t1 LEFT JOIN t2 USING(c1)", + ); + one_statement_parses_to( + "SELECT c1 FROM t1 RIGHT OUTER JOIN t2 USING(c1)", + "SELECT c1 FROM t1 RIGHT JOIN t2 USING(c1)", + ); one_statement_parses_to( "SELECT c1 FROM t1 FULL OUTER JOIN t2 USING(c1)", "SELECT c1 FROM t1 FULL JOIN t2 USING(c1)", ); - let dialects = all_dialects_except(|d| d.is_table_alias(&Keyword::OUTER, &mut Parser::new(d))); - let res = dialects.parse_sql_statements("SELECT * FROM a OUTER JOIN b ON 1"); + let res = parse_sql_statements("SELECT * FROM a OUTER JOIN b ON 1"); assert_eq!( ParserError::ParserError("Expected: APPLY, found: JOIN".to_string()), res.unwrap_err() @@ -7527,10 +6369,7 @@ fn parse_cte_renamed_columns() { let sql = "WITH cte (col1, col2) AS (SELECT foo, bar FROM baz) SELECT * FROM cte"; let query = all_dialects().verified_query(sql); assert_eq!( - vec![ - TableAliasColumnDef::from_name("col1"), - TableAliasColumnDef::from_name("col2") - ], + vec![Ident::new("col1"), Ident::new("col2")], query .with .unwrap() @@ -7558,45 +6397,19 @@ fn parse_recursive_cte() { name: Ident { value: "nums".to_string(), quote_style: None, - span: Span::empty(), }, - columns: vec![TableAliasColumnDef::from_name("val")], + columns: vec![Ident { + value: "val".to_string(), + quote_style: None, + }], }, query: Box::new(cte_query), from: None, materialized: None, - closing_paren_token: AttachedToken::empty(), }; assert_eq!(with.cte_tables.first().unwrap(), &expected); } -#[test] -fn parse_cte_in_data_modification_statements() { - match verified_stmt("WITH x AS (SELECT 1) UPDATE t SET bar = (SELECT * FROM x)") { - Statement::Query(query) => { - assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 1)"); - assert!(matches!(*query.body, SetExpr::Update(_))); - } - other => panic!("Expected: UPDATE, got: {other:?}"), - } - - match verified_stmt("WITH t (x) AS (SELECT 9) DELETE FROM q WHERE id IN (SELECT x FROM t)") { - Statement::Query(query) => { - assert_eq!(query.with.unwrap().to_string(), "WITH t (x) AS (SELECT 9)"); - assert!(matches!(*query.body, SetExpr::Delete(_))); - } - other => panic!("Expected: DELETE, got: {other:?}"), - } - - match verified_stmt("WITH x AS (SELECT 42) INSERT INTO t SELECT 
foo FROM x") { - Statement::Query(query) => { - assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 42)"); - assert!(matches!(*query.body, SetExpr::Insert(_))); - } - other => panic!("Expected: INSERT, got: {other:?}"), - } -} - #[test] fn parse_derived_tables() { let sql = "SELECT a.x, b.y FROM (SELECT x FROM foo) AS a CROSS JOIN (SELECT y FROM bar) AS b"; @@ -7633,9 +6446,17 @@ fn parse_derived_tables() { }), }, joins: vec![Join { - relation: table_from_name(ObjectName::from(vec!["t2".into()])), + relation: TableFactor::Table { + name: ObjectName(vec!["t2".into()]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::Natural), + join_operator: JoinOperator::Inner(JoinConstraint::Natural), }], }), alias: None, @@ -7644,7 +6465,7 @@ fn parse_derived_tables() { } #[test] -fn parse_union_except_intersect_minus() { +fn parse_union_except_intersect() { // TODO: add assertions verified_stmt("SELECT 1 UNION SELECT 2"); verified_stmt("SELECT 1 UNION ALL SELECT 2"); @@ -7670,13 +6491,6 @@ fn parse_union_except_intersect_minus() { verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT ALL BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT DISTINCT BY NAME SELECT 9 AS y, 8 AS x"); - - // Dialects that support `MINUS` as column identifier - // do not support `MINUS` as a set operator. - let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::MINUS, &mut Parser::new(d))); - dialects.verified_stmt("SELECT 1 MINUS SELECT 2"); - dialects.verified_stmt("SELECT 1 MINUS ALL SELECT 2"); - dialects.verified_stmt("SELECT 1 MINUS DISTINCT SELECT 1"); } #[test] @@ -7684,7 +6498,7 @@ fn parse_values() { verified_stmt("SELECT * FROM (VALUES (1), (2), (3))"); verified_stmt("SELECT * FROM (VALUES (1), (2), (3)), (VALUES (1, 2, 3))"); verified_stmt("SELECT * FROM (VALUES (1)) UNION VALUES (1)"); - verified_stmt("SELECT * FROM (VALUES ROW(1, NULL, 'a'), ROW(2, NULL, 'b')) AS t (a, b, c)"); + verified_stmt("SELECT * FROM (VALUES ROW(1, true, 'a'), ROW(2, false, 'b')) AS t (a, b, c)"); } #[test] @@ -7721,17 +6535,7 @@ fn parse_multiple_statements() { ); test_with("DELETE FROM foo", "SELECT", " bar"); test_with("INSERT INTO foo VALUES (1)", "SELECT", " bar"); - // Since MySQL supports the `CREATE TABLE SELECT` syntax, this needs to be handled separately - let res = parse_sql_statements("CREATE TABLE foo (baz INT); SELECT bar"); - assert_eq!( - vec![ - one_statement_parses_to("CREATE TABLE foo (baz INT)", ""), - one_statement_parses_to("SELECT bar", ""), - ], - res.unwrap() - ); - // Check that extra semicolon at the end is stripped by normalization: - one_statement_parses_to("CREATE TABLE foo (baz INT);", "CREATE TABLE foo (baz INT)"); + test_with("CREATE TABLE foo (baz INT)", "SELECT", " bar"); // Make sure that empty statements do not cause an error: let res = parse_sql_statements(";;"); assert_eq!(0, res.unwrap().len()); @@ -7757,9 +6561,6 @@ fn parse_substring() { verified_stmt("SELECT SUBSTRING('1', 1, 3)"); verified_stmt("SELECT SUBSTRING('1', 1)"); verified_stmt("SELECT SUBSTRING('1' FOR 3)"); - verified_stmt("SELECT SUBSTRING('foo' FROM 1 FOR 2) FROM t"); - verified_stmt("SELECT SUBSTR('foo' FROM 1 FOR 2) FROM t"); - verified_stmt("SELECT SUBSTR('foo', 1, 2) FROM t"); } #[test] @@ -7781,15 +6582,13 @@ fn parse_overlay() { let select = 
verified_only_select(sql); assert_eq!( &Expr::Overlay { - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("abcdef".to_string())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString("abcdef".to_string()))), overlay_what: Box::new(Expr::Identifier(Ident::new("name"))), - overlay_from: Box::new(Expr::value(number("3"))), + overlay_from: Box::new(Expr::Value(number("3"))), overlay_for: Some(Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), })), }, expr_from_projection(only(&select.projection)) @@ -7840,6 +6639,7 @@ fn parse_trim() { Box::new(MySqlDialect {}), //Box::new(BigQueryDialect {}), Box::new(SQLiteDialect {}), + Box::new(DuckDbDialect {}), ]); assert_eq!( @@ -7975,7 +6775,6 @@ fn parse_create_view() { let sql = "CREATE VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, columns, query, @@ -7988,9 +6787,7 @@ fn parse_create_view() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -8002,13 +6799,10 @@ fn parse_create_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } - - let _ = verified_stmt("CREATE OR ALTER VIEW v AS SELECT 1"); } #[test] @@ -8020,13 +6814,11 @@ fn parse_create_view_with_options() { CreateTableOptions::With(vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value( - (Value::SingleQuotedString("bar".into())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("bar".into())), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::value(number("123")), + value: Expr::Value(number("123")), }, ]), options @@ -8043,7 +6835,6 @@ fn parse_create_view_with_columns() { // match all_dialects().verified_stmt(sql) { match all_dialects_except(|d| d.is::()).verified_stmt(sql) { Statement::CreateView { - or_alter, name, columns, or_replace, @@ -8056,9 +6847,7 @@ fn parse_create_view_with_columns() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!( columns, @@ -8067,7 +6856,7 @@ fn parse_create_view_with_columns() { .map(|name| ViewColumnDef { name, data_type: None, - options: None, + options: None }) .collect::>() ); @@ -8080,8 +6869,7 @@ fn parse_create_view_with_columns() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } @@ -8092,7 +6880,6 @@ fn parse_create_view_temporary() { let sql = "CREATE TEMPORARY VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, columns, query, @@ -8105,9 +6892,7 @@ fn parse_create_view_temporary() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -8119,8 +6904,7 @@ fn parse_create_view_temporary() { assert!(!late_binding); assert!(!if_not_exists); assert!(temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => 
unreachable!(), } @@ -8131,7 +6915,6 @@ fn parse_create_or_replace_view() { let sql = "CREATE OR REPLACE VIEW v AS SELECT 1"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, columns, or_replace, @@ -8144,9 +6927,7 @@ fn parse_create_or_replace_view() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!(columns, vec![]); assert_eq!(options, CreateTableOptions::None); @@ -8158,8 +6939,7 @@ fn parse_create_or_replace_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } @@ -8174,7 +6954,6 @@ fn parse_create_or_replace_materialized_view() { let sql = "CREATE OR REPLACE MATERIALIZED VIEW v AS SELECT 1"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, columns, or_replace, @@ -8187,9 +6966,7 @@ fn parse_create_or_replace_materialized_view() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!(columns, vec![]); assert_eq!(options, CreateTableOptions::None); @@ -8201,8 +6978,7 @@ fn parse_create_or_replace_materialized_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } @@ -8213,7 +6989,6 @@ fn parse_create_materialized_view() { let sql = "CREATE MATERIALIZED VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, or_replace, columns, @@ -8226,9 +7001,7 @@ fn parse_create_materialized_view() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -8240,8 +7013,7 @@ fn parse_create_materialized_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } @@ -8252,7 +7024,6 @@ fn parse_create_materialized_view_with_cluster_by() { let sql = "CREATE MATERIALIZED VIEW myschema.myview CLUSTER BY (foo) AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { - or_alter, name, or_replace, columns, @@ -8265,9 +7036,7 @@ fn parse_create_materialized_view_with_cluster_by() { if_not_exists, temporary, to, - params, } => { - assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -8279,8 +7048,7 @@ fn parse_create_materialized_view_with_cluster_by() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()); - assert!(params.is_none()); + assert!(to.is_none()) } _ => unreachable!(), } @@ -8362,9 +7130,6 @@ fn parse_drop_view() { } _ => unreachable!(), } - - verified_stmt("DROP MATERIALIZED VIEW a.b.c"); - verified_stmt("DROP MATERIALIZED VIEW IF EXISTS a.b.c"); } #[test] @@ -8378,70 +7143,53 @@ fn parse_invalid_subquery_without_parens() { #[test] fn parse_offset() { - // Dialects that support `OFFSET` as column identifiers - // don't support this syntax. 
- let dialects = - all_dialects_where(|d| !d.is_column_alias(&Keyword::OFFSET, &mut Parser::new(d))); - - let expected_limit_clause = &Some(LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("2")), - rows: OffsetRows::Rows, - }), - limit_by: vec![], + let expect = Some(Offset { + value: Expr::Value(number("2")), + rows: OffsetRows::Rows, }); - let ast = dialects.verified_query("SELECT foo FROM bar OFFSET 2 ROWS"); - assert_eq!(&ast.limit_clause, expected_limit_clause); - let ast = dialects.verified_query("SELECT foo FROM bar WHERE foo = 4 OFFSET 2 ROWS"); - assert_eq!(&ast.limit_clause, expected_limit_clause); - let ast = dialects.verified_query("SELECT foo FROM bar ORDER BY baz OFFSET 2 ROWS"); - assert_eq!(&ast.limit_clause, expected_limit_clause); - let ast = - dialects.verified_query("SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS"); - assert_eq!(&ast.limit_clause, expected_limit_clause); - let ast = - dialects.verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS) OFFSET 2 ROWS"); - assert_eq!(&ast.limit_clause, expected_limit_clause); + let ast = verified_query("SELECT foo FROM bar OFFSET 2 ROWS"); + assert_eq!(ast.offset, expect); + let ast = verified_query("SELECT foo FROM bar WHERE foo = 4 OFFSET 2 ROWS"); + assert_eq!(ast.offset, expect); + let ast = verified_query("SELECT foo FROM bar ORDER BY baz OFFSET 2 ROWS"); + assert_eq!(ast.offset, expect); + let ast = verified_query("SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS"); + assert_eq!(ast.offset, expect); + let ast = verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS) OFFSET 2 ROWS"); + assert_eq!(ast.offset, expect); match *ast.body { SetExpr::Select(s) => match only(s.from).relation { TableFactor::Derived { subquery, .. 
} => { - assert_eq!(&subquery.limit_clause, expected_limit_clause); + assert_eq!(subquery.offset, expect); } _ => panic!("Test broke"), }, _ => panic!("Test broke"), } - let expected_limit_clause = LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("0")), + let ast = verified_query("SELECT 'foo' OFFSET 0 ROWS"); + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(number("0")), rows: OffsetRows::Rows, - }), - limit_by: vec![], - }; - let ast = dialects.verified_query("SELECT 'foo' OFFSET 0 ROWS"); - assert_eq!(ast.limit_clause, Some(expected_limit_clause)); - let expected_limit_clause = LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("1")), + }) + ); + let ast = verified_query("SELECT 'foo' OFFSET 1 ROW"); + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(number("1")), rows: OffsetRows::Row, - }), - limit_by: vec![], - }; - let ast = dialects.verified_query("SELECT 'foo' OFFSET 1 ROW"); - assert_eq!(ast.limit_clause, Some(expected_limit_clause)); - let expected_limit_clause = LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("2")), + }) + ); + let ast = verified_query("SELECT 'foo' OFFSET 1"); + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(number("1")), rows: OffsetRows::None, - }), - limit_by: vec![], - }; - let ast = dialects.verified_query("SELECT 'foo' OFFSET 2"); - assert_eq!(ast.limit_clause, Some(expected_limit_clause)); + }) + ); } #[test] @@ -8449,7 +7197,7 @@ fn parse_fetch() { let fetch_first_two_rows_only = Some(Fetch { with_ties: false, percent: false, - quantity: Some(Expr::value(number("2"))), + quantity: Some(Expr::Value(number("2"))), }); let ast = verified_query("SELECT foo FROM bar FETCH FIRST 2 ROWS ONLY"); assert_eq!(ast.fetch, fetch_first_two_rows_only); @@ -8476,7 +7224,7 @@ fn parse_fetch() { Some(Fetch { with_ties: true, percent: false, - quantity: Some(Expr::value(number("2"))), + quantity: Some(Expr::Value(number("2"))), }) ); let ast = verified_query("SELECT foo FROM bar FETCH FIRST 50 PERCENT ROWS ONLY"); @@ -8485,21 +7233,19 @@ fn parse_fetch() { Some(Fetch { with_ties: false, percent: true, - quantity: Some(Expr::value(number("50"))), + quantity: Some(Expr::Value(number("50"))), }) ); let ast = verified_query( "SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY", ); - let expected_limit_clause = Some(LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("2")), + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(number("2")), rows: OffsetRows::Rows, - }), - limit_by: vec![], - }); - assert_eq!(ast.limit_clause, expected_limit_clause); + }) + ); assert_eq!(ast.fetch, fetch_first_two_rows_only); let ast = verified_query( "SELECT foo FROM (SELECT * FROM bar FETCH FIRST 2 ROWS ONLY) FETCH FIRST 2 ROWS ONLY", @@ -8515,20 +7261,24 @@ fn parse_fetch() { _ => panic!("Test broke"), } let ast = verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY) OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY"); - let expected_limit_clause = &Some(LimitClause::LimitOffset { - limit: None, - offset: Some(Offset { - value: Expr::value(number("2")), + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(number("2")), rows: OffsetRows::Rows, - }), - limit_by: vec![], - }); - assert_eq!(&ast.limit_clause, expected_limit_clause); + }) + ); assert_eq!(ast.fetch, fetch_first_two_rows_only); match 
*ast.body { SetExpr::Select(s) => match only(s.from).relation { TableFactor::Derived { subquery, .. } => { - assert_eq!(&subquery.limit_clause, expected_limit_clause); + assert_eq!( + subquery.offset, + Some(Offset { + value: Expr::Value(number("2")), + rows: OffsetRows::Rows, + }) + ); assert_eq!(subquery.fetch, fetch_first_two_rows_only); } _ => panic!("Test broke"), @@ -8567,7 +7317,7 @@ fn lateral_derived() { let lateral_str = if lateral_in { "LATERAL " } else { "" }; let sql = format!( "SELECT * FROM customer LEFT JOIN {lateral_str}\ - (SELECT * FROM orders WHERE orders.customer = customer.id LIMIT 3) AS orders ON 1" + (SELECT * FROM order WHERE order.customer = customer.id LIMIT 3) AS order ON true" ); let select = verified_only_select(&sql); let from = only(select.from); @@ -8575,9 +7325,7 @@ fn lateral_derived() { let join = &from.joins[0]; assert_eq!( join.join_operator, - JoinOperator::Left(JoinConstraint::On(Expr::Value( - (test_utils::number("1")).with_empty_span() - ))) + JoinOperator::LeftOuter(JoinConstraint::On(Expr::Value(Value::Boolean(true)))) ); if let TableFactor::Derived { lateral, @@ -8586,10 +7334,10 @@ fn lateral_derived() { } = join.relation { assert_eq!(lateral_in, lateral); - assert_eq!(Ident::new("orders"), alias.name); + assert_eq!(Ident::new("order"), alias.name); assert_eq!( subquery.to_string(), - "SELECT * FROM orders WHERE orders.customer = customer.id LIMIT 3" + "SELECT * FROM order WHERE order.customer = customer.id LIMIT 3" ); } else { unreachable!() @@ -8620,27 +7368,36 @@ fn lateral_function() { let sql = "SELECT * FROM customer LEFT JOIN LATERAL generate_series(1, customer.id)"; let actual_select_only = verified_only_select(sql); let expected = Select { - select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], - exclude: None, top_before_distinct: false, + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { + opt_ilike: None, + opt_exclude: None, + opt_except: None, + opt_rename: None, + opt_replace: None, + })], into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "customer".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "customer".to_string(), + quote_style: None, + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![Join { relation: TableFactor::Function { lateral: true, - name: ObjectName::from(vec!["generate_series".into()]), + name: ObjectName(vec!["generate_series".into()]), args: vec![ - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("1")).with_empty_span(), - ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(number("1")))), FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier( vec![Ident::new("customer"), Ident::new("id")], ))), @@ -8648,7 +7405,7 @@ fn lateral_function() { alias: None, }, global: false, - join_operator: JoinOperator::Left(JoinConstraint::None), + join_operator: JoinOperator::LeftOuter(JoinConstraint::None), }], }], lateral_views: vec![], @@ -8664,22 +7421,13 @@ fn lateral_function() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }; assert_eq!(actual_select_only, expected); } #[test] fn parse_start_transaction() { - let dialects = all_dialects_except(|d| - // BigQuery and Snowflake does not 
support this syntax - // - // BigQuery: - // Snowflake: - d.is::() || d.is::()); - match dialects - .verified_stmt("START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE") - { + match verified_stmt("START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE") { Statement::StartTransaction { modes, .. } => assert_eq!( modes, vec![ @@ -8693,7 +7441,7 @@ fn parse_start_transaction() { // For historical reasons, PostgreSQL allows the commas between the modes to // be omitted. - match dialects.one_statement_parses_to( + match one_statement_parses_to( "START TRANSACTION READ ONLY READ WRITE ISOLATION LEVEL SERIALIZABLE", "START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE", ) { @@ -8708,65 +7456,44 @@ fn parse_start_transaction() { _ => unreachable!(), } - dialects.verified_stmt("START TRANSACTION"); - dialects.verified_stmt("BEGIN"); - dialects.verified_stmt("BEGIN WORK"); - dialects.verified_stmt("BEGIN TRANSACTION"); + verified_stmt("START TRANSACTION"); + one_statement_parses_to("BEGIN", "BEGIN TRANSACTION"); + one_statement_parses_to("BEGIN WORK", "BEGIN TRANSACTION"); + one_statement_parses_to("BEGIN TRANSACTION", "BEGIN TRANSACTION"); - dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); - dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL READ COMMITTED"); - dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL REPEATABLE READ"); - dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL SERIALIZABLE"); + verified_stmt("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); + verified_stmt("START TRANSACTION ISOLATION LEVEL READ COMMITTED"); + verified_stmt("START TRANSACTION ISOLATION LEVEL REPEATABLE READ"); + verified_stmt("START TRANSACTION ISOLATION LEVEL SERIALIZABLE"); // Regression test for https://github.com/sqlparser-rs/sqlparser-rs/pull/139, // in which START TRANSACTION would fail to parse if followed by a statement // terminator. assert_eq!( - dialects.parse_sql_statements("START TRANSACTION; SELECT 1"), + parse_sql_statements("START TRANSACTION; SELECT 1"), Ok(vec![ verified_stmt("START TRANSACTION"), verified_stmt("SELECT 1"), ]) ); - let res = dialects.parse_sql_statements("START TRANSACTION ISOLATION LEVEL BAD"); + let res = parse_sql_statements("START TRANSACTION ISOLATION LEVEL BAD"); assert_eq!( ParserError::ParserError("Expected: isolation level, found: BAD".to_string()), res.unwrap_err() ); - let res = dialects.parse_sql_statements("START TRANSACTION BAD"); + let res = parse_sql_statements("START TRANSACTION BAD"); assert_eq!( ParserError::ParserError("Expected: end of statement, found: BAD".to_string()), res.unwrap_err() ); - let res = dialects.parse_sql_statements("START TRANSACTION READ ONLY,"); + let res = parse_sql_statements("START TRANSACTION READ ONLY,"); assert_eq!( ParserError::ParserError("Expected: transaction mode, found: EOF".to_string()), res.unwrap_err() ); - - // MS-SQL syntax - let dialects = all_dialects_where(|d| d.supports_start_transaction_modifier()); - dialects.verified_stmt("BEGIN TRY"); - dialects.verified_stmt("BEGIN CATCH"); - - let dialects = all_dialects_where(|d| { - d.supports_start_transaction_modifier() && d.supports_end_transaction_modifier() - }); - dialects - .parse_sql_statements( - r#" - BEGIN TRY; - SELECT 1/0; - END TRY; - BEGIN CATCH; - EXECUTE foo; - END CATCH; - "#, - ) - .unwrap(); } #[test] @@ -8775,11 +7502,11 @@ fn parse_set_transaction() { // TRANSACTION, so no need to duplicate the tests here. We just do a quick // sanity check. 
match verified_stmt("SET TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE") { - Statement::Set(Set::SetTransaction { + Statement::SetTransaction { modes, session, snapshot, - }) => { + } => { assert_eq!( modes, vec![ @@ -8798,40 +7525,21 @@ fn parse_set_transaction() { #[test] fn parse_set_variable() { match verified_stmt("SET SOMETHING = '1'") { - Statement::Set(Set::SingleAssignment { - scope, + Statement::SetVariable { + local, hivevar, - variable, - values, - }) => { - assert_eq!(scope, None); + variables, + value, + } => { + assert!(!local); assert!(!hivevar); - assert_eq!(variable, ObjectName::from(vec!["SOMETHING".into()])); assert_eq!( - values, - vec![Expr::Value( - (Value::SingleQuotedString("1".into())).with_empty_span() - )] + variables, + OneOrManyWithParens::One(ObjectName(vec!["SOMETHING".into()])) ); - } - _ => unreachable!(), - } - - match verified_stmt("SET GLOBAL VARIABLE = 'Value'") { - Statement::Set(Set::SingleAssignment { - scope, - hivevar, - variable, - values, - }) => { - assert_eq!(scope, Some(ContextModifier::Global)); - assert!(!hivevar); - assert_eq!(variable, ObjectName::from(vec!["VARIABLE".into()])); assert_eq!( - values, - vec![Expr::Value( - (Value::SingleQuotedString("Value".into())).with_empty_span() - )] + value, + vec![Expr::Value(Value::SingleQuotedString("1".into()))] ); } _ => unreachable!(), @@ -8840,21 +7548,28 @@ fn parse_set_variable() { let multi_variable_dialects = all_dialects_where(|d| d.supports_parenthesized_set_variables()); let sql = r#"SET (a, b, c) = (1, 2, 3)"#; match multi_variable_dialects.verified_stmt(sql) { - Statement::Set(Set::ParenthesizedAssignments { variables, values }) => { + Statement::SetVariable { + local, + hivevar, + variables, + value, + } => { + assert!(!local); + assert!(!hivevar); assert_eq!( variables, - vec![ - ObjectName::from(vec!["a".into()]), - ObjectName::from(vec!["b".into()]), - ObjectName::from(vec!["c".into()]), - ] + OneOrManyWithParens::Many(vec![ + ObjectName(vec!["a".into()]), + ObjectName(vec!["b".into()]), + ObjectName(vec!["c".into()]), + ]) ); assert_eq!( - values, + value, vec![ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")), + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")), ] ); } @@ -8910,20 +7625,21 @@ fn parse_set_variable() { #[test] fn parse_set_role_as_variable() { match verified_stmt("SET role = 'foobar'") { - Statement::Set(Set::SingleAssignment { - scope, + Statement::SetVariable { + local, hivevar, - variable, - values, - }) => { - assert_eq!(scope, None); + variables, + value, + } => { + assert!(!local); assert!(!hivevar); - assert_eq!(variable, ObjectName::from(vec!["role".into()])); assert_eq!( - values, - vec![Expr::Value( - (Value::SingleQuotedString("foobar".into())).with_empty_span() - )] + variables, + OneOrManyWithParens::One(ObjectName(vec!["role".into()])) + ); + assert_eq!( + value, + vec![Expr::Value(Value::SingleQuotedString("foobar".into()))] ); } _ => unreachable!(), @@ -8939,16 +7655,15 @@ fn parse_double_colon_cast_at_timezone() { &Expr::AtTimeZone { timestamp: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2001-01-01T00:00:00.000Z".to_string())) - .with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "2001-01-01T00:00:00.000Z".to_string() + ),)), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None }), - time_zone: Box::new(Expr::Value( - 
(Value::SingleQuotedString("Europe/Brussels".to_string())).with_empty_span() - )), + time_zone: Box::new(Expr::Value(Value::SingleQuotedString( + "Europe/Brussels".to_string() + ))), }, expr_from_projection(only(&select.projection)), ); @@ -8957,20 +7672,21 @@ fn parse_double_colon_cast_at_timezone() { #[test] fn parse_set_time_zone() { match verified_stmt("SET TIMEZONE = 'UTC'") { - Statement::Set(Set::SingleAssignment { - scope, + Statement::SetVariable { + local, hivevar, - variable, - values, - }) => { - assert_eq!(scope, None); + variables: variable, + value, + } => { + assert!(!local); assert!(!hivevar); - assert_eq!(variable, ObjectName::from(vec!["TIMEZONE".into()])); assert_eq!( - values, - vec![Expr::Value( - (Value::SingleQuotedString("UTC".into())).with_empty_span() - )] + variable, + OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()])) + ); + assert_eq!( + value, + vec![Expr::Value(Value::SingleQuotedString("UTC".into()))] ); } _ => unreachable!(), @@ -8979,15 +7695,26 @@ fn parse_set_time_zone() { one_statement_parses_to("SET TIME ZONE TO 'UTC'", "SET TIMEZONE = 'UTC'"); } +#[test] +fn parse_set_time_zone_alias() { + match verified_stmt("SET TIME ZONE 'UTC'") { + Statement::SetTimeZone { local, value } => { + assert!(!local); + assert_eq!(value, Expr::Value(Value::SingleQuotedString("UTC".into()))); + } + _ => unreachable!(), + } +} + #[test] fn parse_commit() { match verified_stmt("COMMIT") { - Statement::Commit { chain: false, .. } => (), + Statement::Commit { chain: false } => (), _ => unreachable!(), } match verified_stmt("COMMIT AND CHAIN") { - Statement::Commit { chain: true, .. } => (), + Statement::Commit { chain: true } => (), _ => unreachable!(), } @@ -9002,17 +7729,13 @@ fn parse_commit() { #[test] fn parse_end() { - one_statement_parses_to("END AND NO CHAIN", "END"); - one_statement_parses_to("END WORK AND NO CHAIN", "END"); - one_statement_parses_to("END TRANSACTION AND NO CHAIN", "END"); - one_statement_parses_to("END WORK AND CHAIN", "END AND CHAIN"); - one_statement_parses_to("END TRANSACTION AND CHAIN", "END AND CHAIN"); - one_statement_parses_to("END WORK", "END"); - one_statement_parses_to("END TRANSACTION", "END"); - // MS-SQL syntax - let dialects = all_dialects_where(|d| d.supports_end_transaction_modifier()); - dialects.verified_stmt("END TRY"); - dialects.verified_stmt("END CATCH"); + one_statement_parses_to("END AND NO CHAIN", "COMMIT"); + one_statement_parses_to("END WORK AND NO CHAIN", "COMMIT"); + one_statement_parses_to("END TRANSACTION AND NO CHAIN", "COMMIT"); + one_statement_parses_to("END WORK AND CHAIN", "COMMIT AND CHAIN"); + one_statement_parses_to("END TRANSACTION AND CHAIN", "COMMIT AND CHAIN"); + one_statement_parses_to("END WORK", "COMMIT"); + one_statement_parses_to("END TRANSACTION", "COMMIT"); } #[test] @@ -9079,28 +7802,18 @@ fn ensure_multiple_dialects_are_tested() { #[test] fn parse_create_index() { let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test(name,age DESC)"; - let indexed_columns: Vec = vec![ - IndexColumn { - operator_class: None, - column: OrderByExpr { - expr: Expr::Identifier(Ident::new("name")), - with_fill: None, - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - }, + let indexed_columns = vec![ + OrderByExpr { + expr: Expr::Identifier(Ident::new("name")), + asc: None, + nulls_first: None, + with_fill: None, }, - IndexColumn { - operator_class: None, - column: OrderByExpr { - expr: Expr::Identifier(Ident::new("age")), - with_fill: None, - options: OrderByOptions { - asc: 
Some(false), - nulls_first: None, - }, - }, + OrderByExpr { + expr: Expr::Identifier(Ident::new("age")), + asc: Some(false), + nulls_first: None, + with_fill: None, }, ]; match verified_stmt(sql) { @@ -9124,29 +7837,19 @@ fn parse_create_index() { #[test] fn test_create_index_with_using_function() { - let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test USING BTREE (name,age DESC)"; - let indexed_columns: Vec = vec![ - IndexColumn { - operator_class: None, - column: OrderByExpr { - expr: Expr::Identifier(Ident::new("name")), - with_fill: None, - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - }, + let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test USING btree (name,age DESC)"; + let indexed_columns = vec![ + OrderByExpr { + expr: Expr::Identifier(Ident::new("name")), + asc: None, + nulls_first: None, + with_fill: None, }, - IndexColumn { - operator_class: None, - column: OrderByExpr { - expr: Expr::Identifier(Ident::new("age")), - with_fill: None, - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, - }, + OrderByExpr { + expr: Expr::Identifier(Ident::new("age")), + asc: Some(false), + nulls_first: None, + with_fill: None, }, ]; match verified_stmt(sql) { @@ -9165,7 +7868,7 @@ fn test_create_index_with_using_function() { }) => { assert_eq!("idx_name", name.to_string()); assert_eq!("test", table_name.to_string()); - assert_eq!("BTREE", using.unwrap().to_string()); + assert_eq!("btree", using.unwrap().to_string()); assert_eq!(indexed_columns, columns); assert!(unique); assert!(!concurrently); @@ -9180,22 +7883,17 @@ fn test_create_index_with_using_function() { #[test] fn test_create_index_with_with_clause() { let sql = "CREATE UNIQUE INDEX title_idx ON films(title) WITH (fillfactor = 70, single_param)"; - let indexed_columns: Vec = vec![IndexColumn { - column: OrderByExpr { - expr: Expr::Identifier(Ident::new("title")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }, - operator_class: None, + let indexed_columns = vec![OrderByExpr { + expr: Expr::Identifier(Ident::new("title")), + asc: None, + nulls_first: None, + with_fill: None, }]; let with_parameters = vec![ Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("fillfactor"))), op: BinaryOperator::Eq, - right: Box::new(Expr::value(number("70"))), + right: Box::new(Expr::Value(number("70"))), }, Expr::Identifier(Ident::new("single_param")), ]; @@ -9313,7 +8011,7 @@ fn parse_grant() { granted_by, .. } => match (privileges, objects) { - (Privileges::Actions(actions), Some(GrantObjects::Tables(objects))) => { + (Privileges::Actions(actions), GrantObjects::Tables(objects)) => { assert_eq!( vec![ Action::Select { columns: None }, @@ -9323,12 +8021,10 @@ fn parse_grant() { Ident { value: "shape".into(), quote_style: None, - span: Span::empty(), }, Ident { value: "size".into(), quote_style: None, - span: Span::empty(), }, ]) }, @@ -9338,8 +8034,8 @@ fn parse_grant() { Action::References { columns: None }, Action::Trigger, Action::Connect, - Action::Create { obj_type: None }, - Action::Execute { obj_type: None }, + Action::Create, + Action::Execute, Action::Temporary, ], actions @@ -9363,7 +8059,7 @@ fn parse_grant() { with_grant_option, .. 
} => match (privileges, objects) { - (Privileges::Actions(actions), Some(GrantObjects::AllTablesInSchema { schemas })) => { + (Privileges::Actions(actions), GrantObjects::AllTablesInSchema { schemas }) => { assert_eq!(vec![Action::Insert { columns: None }], actions); assert_eq_vec(&["public"], &schemas); assert_eq_vec(&["browser"], &grantees); @@ -9383,7 +8079,7 @@ fn parse_grant() { granted_by, .. } => match (privileges, objects, granted_by) { - (Privileges::Actions(actions), Some(GrantObjects::Sequences(objects)), None) => { + (Privileges::Actions(actions), GrantObjects::Sequences(objects), None) => { assert_eq!( vec![Action::Usage, Action::Select { columns: None }], actions @@ -9420,7 +8116,7 @@ fn parse_grant() { Privileges::All { with_privileges_keyword, }, - Some(GrantObjects::Schemas(schemas)), + GrantObjects::Schemas(schemas), ) => { assert!(!with_privileges_keyword); assert_eq_vec(&["aa", "b"], &schemas); @@ -9437,10 +8133,7 @@ fn parse_grant() { objects, .. } => match (privileges, objects) { - ( - Privileges::Actions(actions), - Some(GrantObjects::AllSequencesInSchema { schemas }), - ) => { + (Privileges::Actions(actions), GrantObjects::AllSequencesInSchema { schemas }) => { assert_eq!(vec![Action::Usage], actions); assert_eq_vec(&["bus"], &schemas); } @@ -9448,77 +8141,18 @@ fn parse_grant() { }, _ => unreachable!(), } - - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO ROLE role1 WITH GRANT OPTION"); - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO DATABASE ROLE role1"); - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO APPLICATION role1"); - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO APPLICATION ROLE role1"); - verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO SHARE share1"); - verified_stmt("GRANT SELECT ON ALL VIEWS IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON ALL MATERIALIZED VIEWS IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON ALL EXTERNAL TABLES IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT USAGE ON SCHEMA sc1 TO a:b"); - verified_stmt("GRANT USAGE ON SCHEMA sc1 TO GROUP group1"); - verified_stmt("GRANT OWNERSHIP ON ALL TABLES IN SCHEMA DEV_STAS_ROGOZHIN TO ROLE ANALYST"); - verified_stmt("GRANT OWNERSHIP ON ALL TABLES IN SCHEMA DEV_STAS_ROGOZHIN TO ROLE ANALYST COPY CURRENT GRANTS"); - verified_stmt("GRANT OWNERSHIP ON ALL TABLES IN SCHEMA DEV_STAS_ROGOZHIN TO ROLE ANALYST REVOKE CURRENT GRANTS"); - verified_stmt("GRANT USAGE ON DATABASE db1 TO ROLE role1"); - verified_stmt("GRANT USAGE ON WAREHOUSE wh1 TO ROLE role1"); - verified_stmt("GRANT OWNERSHIP ON INTEGRATION int1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON VIEW view1 TO ROLE role1"); - verified_stmt("GRANT EXEC ON my_sp TO runner"); - verified_stmt("GRANT UPDATE ON my_table TO updater_role AS dbo"); - all_dialects_where(|d| d.identifier_quote_style("none") == Some('[')) - .verified_stmt("GRANT SELECT ON [my_table] TO [public]"); - verified_stmt("GRANT SELECT ON FUTURE SCHEMAS IN DATABASE db1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON FUTURE TABLES IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON FUTURE EXTERNAL TABLES IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON FUTURE VIEWS IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON FUTURE MATERIALIZED VIEWS IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT SELECT ON FUTURE 
SEQUENCES IN SCHEMA db1.sc1 TO ROLE role1"); - verified_stmt("GRANT USAGE ON PROCEDURE db1.sc1.foo(INT) TO ROLE role1"); - verified_stmt("GRANT USAGE ON FUNCTION db1.sc1.foo(INT) TO ROLE role1"); -} - -#[test] -fn parse_deny() { - let sql = "DENY INSERT, DELETE ON users TO analyst CASCADE AS admin"; - match verified_stmt(sql) { - Statement::Deny(deny) => { - assert_eq!( - Privileges::Actions(vec![Action::Insert { columns: None }, Action::Delete]), - deny.privileges - ); - assert_eq!( - &GrantObjects::Tables(vec![ObjectName::from(vec![Ident::new("users")])]), - &deny.objects - ); - assert_eq_vec(&["analyst"], &deny.grantees); - assert_eq!(Some(CascadeOption::Cascade), deny.cascade); - assert_eq!(Some(Ident::from("admin")), deny.granted_by); - } - _ => unreachable!(), - } - - verified_stmt("DENY SELECT, INSERT, UPDATE, DELETE ON db1.sc1 TO role1, role2"); - verified_stmt("DENY ALL ON db1.sc1 TO role1"); - verified_stmt("DENY EXEC ON my_sp TO runner"); - - all_dialects_where(|d| d.identifier_quote_style("none") == Some('[')) - .verified_stmt("DENY SELECT ON [my_table] TO [public]"); } #[test] fn test_revoke() { - let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst"; + let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst CASCADE"; match verified_stmt(sql) { Statement::Revoke { privileges, - objects: Some(GrantObjects::Tables(tables)), + objects: GrantObjects::Tables(tables), grantees, - granted_by, cascade, + granted_by, } => { assert_eq!( Privileges::All { @@ -9528,33 +8162,7 @@ fn test_revoke() { ); assert_eq_vec(&["users", "auth"], &tables); assert_eq_vec(&["analyst"], &grantees); - assert_eq!(cascade, None); - assert_eq!(None, granted_by); - } - _ => unreachable!(), - } -} - -#[test] -fn test_revoke_with_cascade() { - let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst CASCADE"; - match all_dialects_except(|d| d.is::()).verified_stmt(sql) { - Statement::Revoke { - privileges, - objects: Some(GrantObjects::Tables(tables)), - grantees, - granted_by, - cascade, - } => { - assert_eq!( - Privileges::All { - with_privileges_keyword: true - }, - privileges - ); - assert_eq_vec(&["users", "auth"], &tables); - assert_eq_vec(&["analyst"], &grantees); - assert_eq!(cascade, Some(CascadeOption::Cascade)); + assert!(cascade); assert_eq!(None, granted_by); } _ => unreachable!(), @@ -9573,7 +8181,6 @@ fn parse_merge() { source, on, clauses, - .. }, Statement::Merge { into: no_into, @@ -9581,7 +8188,6 @@ fn parse_merge() { source: source_no_into, on: on_no_into, clauses: clauses_no_into, - .. 
}, ) => { assert!(into); @@ -9590,7 +8196,7 @@ fn parse_merge() { assert_eq!( table, TableFactor::Table { - name: ObjectName::from(vec![Ident::new("s"), Ident::new("bar")]), + name: ObjectName(vec![Ident::new("s"), Ident::new("bar")]), alias: Some(TableAlias { name: Ident::new("dest"), columns: vec![], @@ -9600,9 +8206,6 @@ fn parse_merge() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], } ); assert_eq!(table, table_no_into); @@ -9614,20 +8217,23 @@ fn parse_merge() { subquery: Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::Wildcard( WildcardAdditionalOptions::default() )], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![ - Ident::new("s"), - Ident::new("foo") - ])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("s"), Ident::new("foo")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -9643,22 +8249,21 @@ fn parse_merge() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }), alias: Some(TableAlias { name: Ident { value: "stg".to_string(), quote_style: None, - span: Span::empty(), }, columns: vec![], }), @@ -9731,14 +8336,14 @@ fn parse_merge() { Ident::new("A"), ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("a".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "a".to_string() + ))), }), action: MergeAction::Update { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + target: AssignmentTarget::ColumnName(ObjectName(vec![ Ident::new("dest"), Ident::new("F") ])), @@ -9748,7 +8353,7 @@ fn parse_merge() { ]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + target: AssignmentTarget::ColumnName(ObjectName(vec![ Ident::new("dest"), Ident::new("G") ])), @@ -9772,20 +8377,7 @@ fn parse_merge() { _ => unreachable!(), }; - let sql = "MERGE INTO s.bar AS dest USING newArrivals AS S ON (1 > 1) WHEN NOT MATCHED THEN INSERT VALUES (stg.A, stg.B, stg.C)"; - verified_stmt(sql); -} - -#[test] -fn test_merge_with_output() { - let sql = "MERGE INTO target_table USING source_table \ - ON target_table.id = source_table.oooid \ - WHEN MATCHED THEN \ - UPDATE SET target_table.description = source_table.description \ - WHEN NOT MATCHED THEN \ - INSERT (ID, description) VALUES (source_table.id, source_table.description) \ - OUTPUT inserted.* INTO log_target"; - + let sql = "MERGE INTO s.bar AS dest USING newArrivals AS S ON false WHEN NOT MATCHED THEN INSERT VALUES (stg.A, stg.B, stg.C)"; verified_stmt(sql); } @@ -9868,12 +8460,11 @@ fn test_lock_table() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "school".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert!(lock.nonblock.is_none()); @@ -9883,12 
+8474,11 @@ fn test_lock_table() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "school".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert!(lock.nonblock.is_none()); @@ -9898,23 +8488,21 @@ fn test_lock_table() { let lock = ast.locks.remove(0); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "school".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert!(lock.nonblock.is_none()); let lock = ast.locks.remove(0); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "student".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert!(lock.nonblock.is_none()); } @@ -9927,12 +8515,11 @@ fn test_lock_nonblock() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "school".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert_eq!(lock.nonblock.unwrap(), NonBlock::SkipLocked); @@ -9942,12 +8529,11 @@ fn test_lock_nonblock() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap(), - ObjectName::from(vec![Ident { + lock.of.unwrap().0, + vec![Ident { value: "school".to_string(), - quote_style: None, - span: Span::empty(), - }]) + quote_style: None + }] ); assert_eq!(lock.nonblock.unwrap(), NonBlock::Nowait); } @@ -9972,24 +8558,23 @@ fn test_placeholder() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Placeholder("$Id1".into())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Placeholder("$Id1".into()))), }) ); - let ast = dialects.verified_query("SELECT * FROM student LIMIT $1 OFFSET $2"); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::Value( - (Value::Placeholder("$1".into())).with_empty_span(), - )), - offset: Some(Offset { - value: Expr::Value((Value::Placeholder("$2".into())).with_empty_span()), + let sql = "SELECT * FROM student LIMIT $1 OFFSET $2"; + let ast = dialects.verified_query(sql); + assert_eq!( + ast.limit, + Some(Expr::Value(Value::Placeholder("$1".into()))) + ); + assert_eq!( + ast.offset, + Some(Offset { + value: Expr::Value(Value::Placeholder("$2".into())), rows: OffsetRows::None, }), - limit_by: vec![], - }; - assert_eq!(ast.limit_clause, Some(expected_limit_clause)); + ); let dialects = TestedDialects::new(vec![ Box::new(GenericDialect {}), @@ -10010,9 +8595,7 @@ fn test_placeholder() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Placeholder("?".into())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Placeholder("?".into()))), }) ); @@ -10021,15 +8604,9 @@ fn test_placeholder() { assert_eq!( ast.projection, vec![ - UnnamedExpr(Expr::Value( - (Value::Placeholder("$fromage_français".into())).with_empty_span() - )), - UnnamedExpr(Expr::Value( - (Value::Placeholder(":x".into())).with_empty_span() - )), - UnnamedExpr(Expr::Value( 
- (Value::Placeholder("?123".into())).with_empty_span() - )), + UnnamedExpr(Expr::Value(Value::Placeholder("$fromage_français".into()))), + UnnamedExpr(Expr::Value(Value::Placeholder(":x".into()))), + UnnamedExpr(Expr::Value(Value::Placeholder("?123".into()))), ] ); } @@ -10069,47 +8646,48 @@ fn verified_expr(query: &str) -> Expr { #[test] fn parse_offset_and_limit() { let sql = "SELECT foo FROM bar LIMIT 1 OFFSET 2"; - let expected_limit_clause = Some(LimitClause::LimitOffset { - limit: Some(Expr::value(number("1"))), - offset: Some(Offset { - value: Expr::value(number("2")), - rows: OffsetRows::None, - }), - limit_by: vec![], + let expect = Some(Offset { + value: Expr::Value(number("2")), + rows: OffsetRows::None, }); let ast = verified_query(sql); - assert_eq!(ast.limit_clause, expected_limit_clause); + assert_eq!(ast.offset, expect); + assert_eq!(ast.limit, Some(Expr::Value(number("1")))); // different order is OK one_statement_parses_to("SELECT foo FROM bar OFFSET 2 LIMIT 1", sql); // mysql syntax is ok for some dialects - all_dialects_where(|d| d.supports_limit_comma()) - .verified_query("SELECT foo FROM bar LIMIT 2, 1"); + TestedDialects::new(vec![ + Box::new(GenericDialect {}), + Box::new(MySqlDialect {}), + Box::new(SQLiteDialect {}), + Box::new(ClickHouseDialect {}), + ]) + .one_statement_parses_to("SELECT foo FROM bar LIMIT 2, 1", sql); // expressions are allowed let sql = "SELECT foo FROM bar LIMIT 1 + 2 OFFSET 3 * 4"; let ast = verified_query(sql); - let expected_limit_clause = LimitClause::LimitOffset { - limit: Some(Expr::BinaryOp { - left: Box::new(Expr::value(number("1"))), + assert_eq!( + ast.limit, + Some(Expr::BinaryOp { + left: Box::new(Expr::Value(number("1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("2"))), + right: Box::new(Expr::Value(number("2"))), }), - offset: Some(Offset { + ); + assert_eq!( + ast.offset, + Some(Offset { value: Expr::BinaryOp { - left: Box::new(Expr::value(number("3"))), + left: Box::new(Expr::Value(number("3"))), op: BinaryOperator::Multiply, - right: Box::new(Expr::value(number("4"))), + right: Box::new(Expr::Value(number("4"))), }, rows: OffsetRows::None, }), - limit_by: vec![], - }; - assert_eq!(ast.limit_clause, Some(expected_limit_clause),); - - // OFFSET without LIMIT - verified_stmt("SELECT foo FROM bar OFFSET 2"); + ); // Can't repeat OFFSET / LIMIT let res = parse_sql_statements("SELECT foo FROM bar OFFSET 2 OFFSET 2"); @@ -10134,11 +8712,10 @@ fn parse_offset_and_limit() { #[test] fn parse_time_functions() { fn test_time_function(func_name: &'static str) { - let sql = format!("SELECT {func_name}()"); + let sql = format!("SELECT {}()", func_name); let select = verified_only_select(&sql); let select_localtime_func_call_ast = Function { - name: ObjectName::from(vec![Ident::new(func_name)]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new(func_name)]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -10156,7 +8733,7 @@ fn parse_time_functions() { ); // Validating Parenthesis - let sql_without_parens = format!("SELECT {func_name}"); + let sql_without_parens = format!("SELECT {}", func_name); let mut ast_without_parens = select_localtime_func_call_ast; ast_without_parens.args = FunctionArguments::None; assert_eq!( @@ -10176,9 +8753,7 @@ fn parse_time_functions() { fn parse_position() { assert_eq!( Expr::Position { - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("@".to_string())).with_empty_span() - )), + expr: 
Box::new(Expr::Value(Value::SingleQuotedString("@".to_string()))), r#in: Box::new(Expr::Identifier(Ident::new("field"))), }, verified_expr("POSITION('@' IN field)"), @@ -10189,9 +8764,9 @@ fn parse_position() { call( "position", [ - Expr::Value((Value::SingleQuotedString("an".to_owned())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("banana".to_owned())).with_empty_span()), - Expr::value(number("1")), + Expr::Value(Value::SingleQuotedString("an".to_owned())), + Expr::Value(Value::SingleQuotedString("banana".to_owned())), + Expr::Value(number("1")), ] ), verified_expr("position('an', 'banana', 1)") @@ -10236,46 +8811,6 @@ fn parse_is_boolean() { verified_expr(sql) ); - let sql = "a IS NORMALIZED"; - assert_eq!( - IsNormalized { - expr: Box::new(Identifier(Ident::new("a"))), - form: None, - negated: false, - }, - verified_expr(sql) - ); - - let sql = "a IS NOT NORMALIZED"; - assert_eq!( - IsNormalized { - expr: Box::new(Identifier(Ident::new("a"))), - form: None, - negated: true, - }, - verified_expr(sql) - ); - - let sql = "a IS NFKC NORMALIZED"; - assert_eq!( - IsNormalized { - expr: Box::new(Identifier(Ident::new("a"))), - form: Some(NormalizationForm::NFKC), - negated: false, - }, - verified_expr(sql) - ); - - let sql = "a IS NOT NFKD NORMALIZED"; - assert_eq!( - IsNormalized { - expr: Box::new(Identifier(Ident::new("a"))), - form: Some(NormalizationForm::NFKD), - negated: true, - }, - verified_expr(sql) - ); - let sql = "a IS UNKNOWN"; assert_eq!( IsUnknown(Box::new(Identifier(Ident::new("a")))), @@ -10294,12 +8829,6 @@ fn parse_is_boolean() { verified_stmt("SELECT f FROM foo WHERE field IS FALSE"); verified_stmt("SELECT f FROM foo WHERE field IS NOT FALSE"); - verified_stmt("SELECT f FROM foo WHERE field IS NORMALIZED"); - verified_stmt("SELECT f FROM foo WHERE field IS NFC NORMALIZED"); - verified_stmt("SELECT f FROM foo WHERE field IS NFD NORMALIZED"); - verified_stmt("SELECT f FROM foo WHERE field IS NOT NORMALIZED"); - verified_stmt("SELECT f FROM foo WHERE field IS NOT NFKC NORMALIZED"); - verified_stmt("SELECT f FROM foo WHERE field IS UNKNOWN"); verified_stmt("SELECT f FROM foo WHERE field IS NOT UNKNOWN"); @@ -10307,37 +8836,7 @@ fn parse_is_boolean() { let res = parse_sql_statements(sql); assert_eq!( ParserError::ParserError( - "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: 0" - .to_string() - ), - res.unwrap_err() - ); - - let sql = "SELECT s, s IS XYZ NORMALIZED FROM foo"; - let res = parse_sql_statements(sql); - assert_eq!( - ParserError::ParserError( - "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: XYZ" - .to_string() - ), - res.unwrap_err() - ); - - let sql = "SELECT s, s IS NFKC FROM foo"; - let res = parse_sql_statements(sql); - assert_eq!( - ParserError::ParserError( - "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: FROM" - .to_string() - ), - res.unwrap_err() - ); - - let sql = "SELECT s, s IS TRIM(' NFKC ') FROM foo"; - let res = parse_sql_statements(sql); - assert_eq!( - ParserError::ParserError( - "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: TRIM" + "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: 0" .to_string() ), res.unwrap_err() @@ -10412,7 +8911,7 @@ fn parse_cache_table() { verified_stmt(format!("CACHE TABLE '{cache_table_name}'").as_str()), Statement::Cache { table_flag: None, - table_name: ObjectName::from(vec![Ident::with_quote('\'', 
cache_table_name)]), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: None, @@ -10422,8 +8921,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} TABLE '{cache_table_name}'").as_str()), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: None, @@ -10438,17 +8937,17 @@ fn parse_cache_table() { .as_str() ), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), + value: Expr::Value(Value::SingleQuotedString("V1".into())), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::value(number("0.88")), + value: Expr::Value(number("0.88")), }, ], query: None, @@ -10460,20 +8959,20 @@ fn parse_cache_table() { format!( "CACHE {table_flag} TABLE '{cache_table_name}' OPTIONS('K1' = 'V1', 'K2' = 0.88) {sql}", ) - .as_str() + .as_str() ), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), + value: Expr::Value(Value::SingleQuotedString("V1".into())), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::value(number("0.88")), + value: Expr::Value(number("0.88")), }, ], query: Some(query.clone().into()), @@ -10485,20 +8984,20 @@ fn parse_cache_table() { format!( "CACHE {table_flag} TABLE '{cache_table_name}' OPTIONS('K1' = 'V1', 'K2' = 0.88) AS {sql}", ) - .as_str() + .as_str() ), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: true, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), + value: Expr::Value(Value::SingleQuotedString("V1".into())), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::value(number("0.88")), + value: Expr::Value(number("0.88")), }, ], query: Some(query.clone().into()), @@ -10508,8 +9007,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} TABLE '{cache_table_name}' {sql}").as_str()), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + 
table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: Some(query.clone().into()), @@ -10519,8 +9018,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} TABLE '{cache_table_name}' AS {sql}").as_str()), Statement::Cache { - table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), - table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), + table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), has_as: true, options: vec![], query: Some(query.into()), @@ -10583,7 +9082,7 @@ fn parse_uncache_table() { assert_eq!( verified_stmt("UNCACHE TABLE 'table_name'"), Statement::UNCache { - table_name: ObjectName::from(vec![Ident::with_quote('\'', "table_name")]), + table_name: ObjectName(vec![Ident::with_quote('\'', "table_name")]), if_exists: false, } ); @@ -10591,7 +9090,7 @@ fn parse_uncache_table() { assert_eq!( verified_stmt("UNCACHE TABLE IF EXISTS 'table_name'"), Statement::UNCache { - table_name: ObjectName::from(vec![Ident::with_quote('\'', "table_name")]), + table_name: ObjectName(vec![Ident::with_quote('\'', "table_name")]), if_exists: true, } ); @@ -10695,16 +9194,21 @@ fn parse_with_recursion_limit() { #[test] fn parse_escaped_string_with_unescape() { - fn assert_mysql_query_value(dialects: &TestedDialects, sql: &str, quoted: &str) { - match dialects.one_statement_parses_to(sql, "") { + fn assert_mysql_query_value(sql: &str, quoted: &str) { + let stmt = TestedDialects::new(vec![ + Box::new(MySqlDialect {}), + Box::new(BigQueryDialect {}), + Box::new(SnowflakeDialect {}), + ]) + .one_statement_parses_to(sql, ""); + + match stmt { Statement::Query(query) => match *query.body { SetExpr::Select(value) => { let expr = expr_from_projection(only(&value.projection)); assert_eq!( *expr, - Expr::Value( - (Value::SingleQuotedString(quoted.to_string())).with_empty_span() - ) + Expr::Value(Value::SingleQuotedString(quoted.to_string())) ); } _ => unreachable!(), @@ -10712,38 +9216,17 @@ fn parse_escaped_string_with_unescape() { _ => unreachable!(), }; } - - let escaping_dialects = - &all_dialects_where(|dialect| dialect.supports_string_literal_backslash_escape()); - let no_wildcard_exception = &all_dialects_where(|dialect| { - dialect.supports_string_literal_backslash_escape() && !dialect.ignores_wildcard_escapes() - }); - let with_wildcard_exception = &all_dialects_where(|dialect| { - dialect.supports_string_literal_backslash_escape() && dialect.ignores_wildcard_escapes() - }); - let sql = r"SELECT 'I\'m fine'"; - assert_mysql_query_value(escaping_dialects, sql, "I'm fine"); + assert_mysql_query_value(sql, "I'm fine"); let sql = r#"SELECT 'I''m fine'"#; - assert_mysql_query_value(escaping_dialects, sql, "I'm fine"); + assert_mysql_query_value(sql, "I'm fine"); let sql = r#"SELECT 'I\"m fine'"#; - assert_mysql_query_value(escaping_dialects, sql, "I\"m fine"); + assert_mysql_query_value(sql, "I\"m fine"); let sql = r"SELECT 'Testing: \0 \\ \% \_ \b \n \r \t \Z \a \h \ '"; - assert_mysql_query_value( - no_wildcard_exception, - sql, - "Testing: \0 \\ % _ \u{8} \n \r \t \u{1a} \u{7} h ", - ); - - // check MySQL doesn't remove backslash from escaped LIKE wildcards - assert_mysql_query_value( - with_wildcard_exception, - sql, - "Testing: \0 \\ \\% \\_ \u{8} \n \r \t \u{1a} \u{7} h ", - ); + assert_mysql_query_value(sql, "Testing: \0 \\ % _ \u{8} \n \r \t \u{1a} \u{7} h "); } #[test] @@ -10765,9 +9248,7 @@ fn 
parse_escaped_string_without_unescape() { let expr = expr_from_projection(only(&value.projection)); assert_eq!( *expr, - Expr::Value( - (Value::SingleQuotedString(quoted.to_string())).with_empty_span() - ) + Expr::Value(Value::SingleQuotedString(quoted.to_string())) ); } _ => unreachable!(), @@ -10816,7 +9297,7 @@ fn parse_pivot_table() { verified_only_select(sql).from[0].relation, Pivot { table: Box::new(TableFactor::Table { - name: ObjectName::from(vec![Ident::new("monthly_sales")]), + name: ObjectName(vec![Ident::new("monthly_sales")]), alias: Some(TableAlias { name: Ident::new("a"), columns: vec![] @@ -10826,9 +9307,6 @@ fn parse_pivot_table() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }), aggregate_functions: vec![ expected_function("a", None), @@ -10838,13 +9316,11 @@ fn parse_pivot_table() { value_column: vec![Ident::new("a"), Ident::new("MONTH")], value_source: PivotValueSource::List(vec![ ExprWithAlias { - expr: Expr::value(number("1")), + expr: Expr::Value(number("1")), alias: Some(Ident::new("x")) }, ExprWithAlias { - expr: Expr::Value( - (Value::SingleQuotedString("two".to_string())).with_empty_span() - ), + expr: Expr::Value(Value::SingleQuotedString("two".to_string())), alias: None }, ExprWithAlias { @@ -10856,13 +9332,9 @@ fn parse_pivot_table() { alias: Some(TableAlias { name: Ident { value: "p".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }, - columns: vec![ - TableAliasColumnDef::from_name("c"), - TableAliasColumnDef::from_name("d"), - ], + columns: vec![Ident::new("c"), Ident::new("d")], }), } ); @@ -10890,47 +9362,44 @@ fn parse_unpivot_table() { "SELECT * FROM sales AS s ", "UNPIVOT(quantity FOR quarter IN (Q1, Q2, Q3, Q4)) AS u (product, quarter, quantity)" ); - let base_unpivot = Unpivot { - table: Box::new(TableFactor::Table { - name: ObjectName::from(vec![Ident::new("sales")]), - alias: Some(TableAlias { - name: Ident::new("s"), - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }), - null_inclusion: None, - value: Ident { - value: "quantity".to_string(), - quote_style: None, - span: Span::empty(), - }, - name: Ident { - value: "quarter".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: ["Q1", "Q2", "Q3", "Q4"] - .into_iter() - .map(Ident::new) - .collect(), - alias: Some(TableAlias { - name: Ident::new("u"), - columns: ["product", "quarter", "quantity"] + pretty_assertions::assert_eq!( + verified_only_select(sql).from[0].relation, + Unpivot { + table: Box::new(TableFactor::Table { + name: ObjectName(vec![Ident::new("sales")]), + alias: Some(TableAlias { + name: Ident::new("s"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }), + value: Ident { + value: "quantity".to_string(), + quote_style: None + }, + + name: Ident { + value: "quarter".to_string(), + quote_style: None + }, + columns: ["Q1", "Q2", "Q3", "Q4"] .into_iter() - .map(TableAliasColumnDef::from_name) + .map(Ident::new) .collect(), - }), - }; - pretty_assertions::assert_eq!(verified_only_select(sql).from[0].relation, base_unpivot); + alias: Some(TableAlias { + name: Ident::new("u"), + columns: ["product", "quarter", "quantity"] + .into_iter() + .map(Ident::new) + .collect() + }), + } + ); assert_eq!(verified_stmt(sql).to_string(), sql); let sql_without_aliases = 
concat!( @@ -10950,105 +9419,6 @@ fn parse_unpivot_table() { verified_stmt(sql_without_aliases).to_string(), sql_without_aliases ); - - let sql_unpivot_exclude_nulls = concat!( - "SELECT * FROM sales AS s ", - "UNPIVOT EXCLUDE NULLS (quantity FOR quarter IN (Q1, Q2, Q3, Q4)) AS u (product, quarter, quantity)" - ); - - if let Unpivot { null_inclusion, .. } = - &verified_only_select(sql_unpivot_exclude_nulls).from[0].relation - { - assert_eq!(*null_inclusion, Some(NullInclusion::ExcludeNulls)); - } - - assert_eq!( - verified_stmt(sql_unpivot_exclude_nulls).to_string(), - sql_unpivot_exclude_nulls - ); - - let sql_unpivot_include_nulls = concat!( - "SELECT * FROM sales AS s ", - "UNPIVOT INCLUDE NULLS (quantity FOR quarter IN (Q1, Q2, Q3, Q4)) AS u (product, quarter, quantity)" - ); - - if let Unpivot { null_inclusion, .. } = - &verified_only_select(sql_unpivot_include_nulls).from[0].relation - { - assert_eq!(*null_inclusion, Some(NullInclusion::IncludeNulls)); - } - - assert_eq!( - verified_stmt(sql_unpivot_include_nulls).to_string(), - sql_unpivot_include_nulls - ); -} - -#[test] -fn parse_select_table_with_index_hints() { - let supported_dialects = all_dialects_where(|d| d.supports_table_hints()); - let s = supported_dialects.verified_only_select( - "SELECT * FROM t1 USE INDEX (i1) IGNORE INDEX FOR ORDER BY (i2) ORDER BY a", - ); - if let TableFactor::Table { index_hints, .. } = &s.from[0].relation { - assert_eq!( - vec![ - TableIndexHints { - hint_type: TableIndexHintType::Use, - index_names: vec!["i1".into()], - index_type: TableIndexType::Index, - for_clause: None, - }, - TableIndexHints { - hint_type: TableIndexHintType::Ignore, - index_names: vec!["i2".into()], - index_type: TableIndexType::Index, - for_clause: Some(TableIndexHintForClause::OrderBy), - }, - ], - *index_hints - ); - } else { - panic!("Expected TableFactor::Table"); - } - supported_dialects.verified_stmt("SELECT * FROM t1 USE INDEX (i1) USE INDEX (i1, i1)"); - supported_dialects.verified_stmt( - "SELECT * FROM t1 USE INDEX () IGNORE INDEX (i2) USE INDEX (i1) USE INDEX (i2)", - ); - supported_dialects.verified_stmt("SELECT * FROM t1 FORCE INDEX FOR JOIN (i2)"); - supported_dialects.verified_stmt("SELECT * FROM t1 IGNORE INDEX FOR JOIN (i2)"); - supported_dialects.verified_stmt( - "SELECT * FROM t USE INDEX (index1) IGNORE INDEX FOR ORDER BY (index1) IGNORE INDEX FOR GROUP BY (index1) WHERE A = B", - ); - - // Test that dialects that don't support table hints will keep parsing the USE as table alias - let sql = "SELECT * FROM T USE LIMIT 1"; - let unsupported_dialects = all_dialects_where(|d| !d.supports_table_hints()); - let select = unsupported_dialects - .verified_only_select_with_canonical(sql, "SELECT * FROM T AS USE LIMIT 1"); - assert_eq!( - select.from, - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier( - Ident::new("T") - )]), - alias: Some(TableAlias { - name: Ident::new("USE"), - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![], - }] - ); } #[test] @@ -11064,7 +9434,7 @@ fn parse_pivot_unpivot_table() { Pivot { table: Box::new(Unpivot { table: Box::new(TableFactor::Table { - name: ObjectName::from(vec![Ident::new("census")]), + name: ObjectName(vec![Ident::new("census")]), alias: Some(TableAlias { name: Ident::new("c"), columns: vec![] @@ -11074,21 +9444,15 @@ fn 
parse_pivot_unpivot_table() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }), - null_inclusion: None, value: Ident { value: "population".to_string(), - quote_style: None, - span: Span::empty() + quote_style: None }, name: Ident { value: "year".to_string(), - quote_style: None, - span: Span::empty() + quote_style: None }, columns: ["population_2000", "population_2010"] .into_iter() @@ -11106,17 +9470,11 @@ fn parse_pivot_unpivot_table() { value_column: vec![Ident::new("year")], value_source: PivotValueSource::List(vec![ ExprWithAlias { - expr: Expr::Value( - (Value::SingleQuotedString("population_2000".to_string())) - .with_empty_span() - ), + expr: Expr::Value(Value::SingleQuotedString("population_2000".to_string())), alias: None }, ExprWithAlias { - expr: Expr::Value( - (Value::SingleQuotedString("population_2010".to_string())) - .with_empty_span() - ), + expr: Expr::Value(Value::SingleQuotedString("population_2010".to_string())), alias: None }, ]), @@ -11157,17 +9515,10 @@ fn parse_non_latin_identifiers() { Box::new(RedshiftSqlDialect {}), Box::new(MySqlDialect {}), ]); + supported_dialects.verified_stmt("SELECT a.説明 FROM test.public.inter01 AS a"); supported_dialects.verified_stmt("SELECT a.説明 FROM inter01 AS a, inter01_transactions AS b WHERE a.説明 = b.取引 GROUP BY a.説明"); supported_dialects.verified_stmt("SELECT 説明, hühnervögel, garçon, Москва, 東京 FROM inter01"); - - let supported_dialects = TestedDialects::new(vec![ - Box::new(GenericDialect {}), - Box::new(DuckDbDialect {}), - Box::new(PostgreSqlDialect {}), - Box::new(MsSqlDialect {}), - Box::new(MySqlDialect {}), - ]); assert!(supported_dialects .parse_sql_statements("SELECT 💝 FROM table1") .is_err()); @@ -11216,7 +9567,7 @@ fn parse_trailing_comma() { trailing_commas.verified_stmt(r#"SELECT "from" FROM "from""#); // doesn't allow any trailing commas - let trailing_commas = TestedDialects::new(vec![Box::new(PostgreSqlDialect {})]); + let trailing_commas = TestedDialects::new(vec![Box::new(GenericDialect {})]); assert_eq!( trailing_commas @@ -11240,19 +9591,15 @@ fn parse_trailing_comma() { "Expected: column name or constraint definition, found: )".to_string() ) ); - - let unsupported_dialects = all_dialects_where(|d| !d.supports_trailing_commas()); - assert_eq!( - unsupported_dialects - .parse_sql_statements("SELECT * FROM track ORDER BY milliseconds,") - .unwrap_err(), - ParserError::ParserError("Expected: an expression, found: EOF".to_string()) - ); } #[test] fn parse_projection_trailing_comma() { - let trailing_commas = all_dialects_where(|d| d.supports_projection_trailing_commas()); + // Some dialects allow trailing commas only in the projection + let trailing_commas = TestedDialects::new(vec![ + Box::new(SnowflakeDialect {}), + Box::new(BigQueryDialect {}), + ]); trailing_commas.one_statement_parses_to( "SELECT album_id, name, FROM track", @@ -11265,14 +9612,20 @@ fn parse_projection_trailing_comma() { trailing_commas.verified_stmt("SELECT DISTINCT ON (album_id) name FROM track"); - let unsupported_dialects = all_dialects_where(|d| { - !d.supports_projection_trailing_commas() && !d.supports_trailing_commas() - }); assert_eq!( - unsupported_dialects - .parse_sql_statements("SELECT album_id, name, FROM track") + trailing_commas + .parse_sql_statements("SELECT * FROM track ORDER BY milliseconds,") .unwrap_err(), - ParserError::ParserError("Expected an expression, found: FROM".to_string()) + ParserError::ParserError("Expected: an expression, found: 
EOF".to_string()) + ); + + assert_eq!( + trailing_commas + .parse_sql_statements("CREATE TABLE employees (name text, age int,)") + .unwrap_err(), + ParserError::ParserError( + "Expected: column name or constraint definition, found: )".to_string() + ), ); } @@ -11282,7 +9635,7 @@ fn parse_create_type() { verified_stmt("CREATE TYPE db.type_name AS (foo INT, bar TEXT COLLATE \"de_DE\")"); assert_eq!( Statement::CreateType { - name: ObjectName::from(vec![Ident::new("db"), Ident::new("type_name")]), + name: ObjectName(vec![Ident::new("db"), Ident::new("type_name")]), representation: UserDefinedTypeRepresentation::Composite { attributes: vec![ UserDefinedTypeCompositeAttributeDef { @@ -11293,7 +9646,7 @@ fn parse_create_type() { UserDefinedTypeCompositeAttributeDef { name: Ident::new("bar"), data_type: DataType::Text, - collation: Some(ObjectName::from(vec![Ident::with_quote('\"', "de_DE")])), + collation: Some(ObjectName(vec![Ident::with_quote('\"', "de_DE")])), } ] } @@ -11365,16 +9718,15 @@ fn parse_call() { assert_eq!( verified_stmt("CALL my_procedure('a')"), Statement::Call(Function { - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("a".to_string())).with_empty_span() + Value::SingleQuotedString("a".to_string()) )))], clauses: vec![], }), - name: ObjectName::from(vec![Ident::new("my_procedure")]), + name: ObjectName(vec![Ident::new("my_procedure")]), filter: None, null_treatment: None, over: None, @@ -11383,90 +9735,9 @@ fn parse_call() { ); } -#[test] -fn parse_execute_stored_procedure() { - let expected = Statement::Execute { - name: Some(ObjectName::from(vec![ - Ident { - value: "my_schema".to_string(), - quote_style: None, - span: Span::empty(), - }, - Ident { - value: "my_stored_procedure".to_string(), - quote_style: None, - span: Span::empty(), - }, - ])), - parameters: vec![ - Expr::Value((Value::NationalStringLiteral("param1".to_string())).with_empty_span()), - Expr::Value((Value::NationalStringLiteral("param2".to_string())).with_empty_span()), - ], - has_parentheses: false, - immediate: false, - using: vec![], - into: vec![], - }; - assert_eq!( - // Microsoft SQL Server does not use parentheses around arguments for EXECUTE - ms_and_generic() - .verified_stmt("EXECUTE my_schema.my_stored_procedure N'param1', N'param2'"), - expected - ); - assert_eq!( - ms_and_generic().one_statement_parses_to( - "EXEC my_schema.my_stored_procedure N'param1', N'param2';", - "EXECUTE my_schema.my_stored_procedure N'param1', N'param2'", - ), - expected - ); -} - -#[test] -fn parse_execute_immediate() { - let dialects = all_dialects_where(|d| d.supports_execute_immediate()); - - let expected = Statement::Execute { - parameters: vec![Expr::Value( - (Value::SingleQuotedString("SELECT 1".to_string())).with_empty_span(), - )], - immediate: true, - using: vec![ExprWithAlias { - expr: Expr::value(number("1")), - alias: Some(Ident::new("b")), - }], - into: vec![Ident::new("a")], - name: None, - has_parentheses: false, - }; - - let stmt = dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a USING 1 AS b"); - assert_eq!(expected, stmt); - - dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a, b USING 1 AS x, y"); - dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' USING 1 AS x, y"); - dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a, b"); - dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1'"); 
- dialects.verified_stmt("EXECUTE 'SELECT 1'"); - - assert_eq!( - ParserError::ParserError("Expected: identifier, found: ,".to_string()), - dialects - .parse_sql_statements("EXECUTE IMMEDIATE 'SELECT 1' USING 1 AS, y") - .unwrap_err() - ); -} - #[test] fn parse_create_table_collate() { - all_dialects().verified_stmt("CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\")"); - // check ordering is preserved - all_dialects().verified_stmt( - "CREATE TABLE tbl (foo INT, bar TEXT CHARACTER SET utf8mb4 COLLATE \"de_DE\")", - ); - all_dialects().verified_stmt( - "CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\" CHARACTER SET utf8mb4)", - ); + pg_and_generic().verified_stmt("CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\")"); } #[test] @@ -11535,15 +9806,21 @@ fn parse_unload() { Statement::Unload { query: Box::new(Query { body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![UnnamedExpr(Expr::Identifier(Ident::new("cola"))),], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("tab")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("tab")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -11559,32 +9836,28 @@ fn parse_unload() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), with: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, order_by: None, settings: None, format_clause: None, - pipe_operators: vec![], }), to: Ident { value: "s3://...".to_string(), - quote_style: Some('\''), - span: Span::empty(), + quote_style: Some('\'') }, with: vec![SqlOption::KeyValue { key: Ident { value: "format".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }, - value: Expr::Value( - (Value::SingleQuotedString("AVRO".to_string())).with_empty_span() - ) + value: Expr::Value(Value::SingleQuotedString("AVRO".to_string())) }] } ); @@ -11618,7 +9891,6 @@ fn test_comment_hash_syntax() { Box::new(BigQueryDialect {}), Box::new(SnowflakeDialect {}), Box::new(MySqlDialect {}), - Box::new(HiveDialect {}), ]); let sql = r#" # comment @@ -11640,15 +9912,14 @@ fn test_parse_inline_comment() { // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) match all_dialects_except(|d| d.is::()).verified_stmt(sql) { Statement::CreateTable(CreateTable { - columns, - table_options, - .. + columns, comment, .. 
}) => { assert_eq!( columns, vec![ColumnDef { name: Ident::new("id".to_string()), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: Comment("comment without equal".to_string()), @@ -11656,10 +9927,8 @@ fn test_parse_inline_comment() { }] ); assert_eq!( - table_options, - CreateTableOptions::Plain(vec![SqlOption::Comment(CommentDef::WithEq( - "comment with equal".to_string() - ))]) + comment.unwrap(), + CommentDef::WithEq("comment with equal".to_string()) ); } _ => unreachable!(), @@ -11687,39 +9956,20 @@ fn parse_map_access_expr() { Box::new(ClickHouseDialect {}), ]); let expr = dialects.verified_expr(sql); - let expected = Expr::CompoundFieldAccess { - root: Box::new(Expr::Identifier(Ident::with_span( - Span::new(Location::of(1, 1), Location::of(1, 6)), - "users", - ))), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Index { - index: Expr::UnaryOp { + let expected = Expr::MapAccess { + column: Expr::Identifier(Ident::new("users")).into(), + keys: vec![ + MapAccessKey { + key: Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Expr::value(number("1")).into(), + expr: Expr::Value(number("1")).into(), }, - }), - AccessExpr::Subscript(Subscript::Index { - index: Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_span( - Span::new(Location::of(1, 11), Location::of(1, 22)), - "safe_offset", - )]), - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("2")).with_empty_span(), - )))], - clauses: vec![], - }), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - uses_odbc_syntax: false, - }), - }), + syntax: MapAccessSyntax::Bracket, + }, + MapAccessKey { + key: call("safe_offset", [Expr::Value(number("2"))]), + syntax: MapAccessSyntax::Bracket, + }, ], }; assert_eq!(expr, expected); @@ -11732,7 +9982,6 @@ fn parse_map_access_expr() { #[test] fn parse_connect_by() { let expect_query = Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, @@ -11741,9 +9990,16 @@ fn parse_connect_by() { SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("manager_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("title"))), ], - exclude: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("employees")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("employees")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], into: None, @@ -11763,9 +10019,9 @@ fn parse_connect_by() { condition: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("title"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - Value::SingleQuotedString("president".to_owned()).with_empty_span(), - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "president".to_owned(), + ))), }, relationships: vec![Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("manager_id"))), @@ -11775,7 +10031,6 @@ fn parse_connect_by() { ))))), }], }), - flavor: SelectFlavor::Standard, }; let connect_by_1 = concat!( @@ -11814,7 +10069,6 @@ fn parse_connect_by() { assert_eq!( all_dialects_where(|d| d.supports_connect_by()).verified_only_select(connect_by_3), Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, @@ -11823,9 
+10077,16 @@ fn parse_connect_by() { SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("manager_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("title"))), ], - exclude: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("employees")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("employees")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], into: None, @@ -11834,7 +10095,7 @@ fn parse_connect_by() { selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("employee_id"))), op: BinaryOperator::NotEq, - right: Box::new(Expr::value(number("42"))), + right: Box::new(Expr::Value(number("42"))), }), group_by: GroupByExpr::Expressions(vec![], vec![]), cluster_by: vec![], @@ -11849,9 +10110,9 @@ fn parse_connect_by() { condition: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("title"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("president".to_owned(),)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "president".to_owned(), + ))), }, relationships: vec![Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("manager_id"))), @@ -11861,7 +10122,6 @@ fn parse_connect_by() { ))))), }], }), - flavor: SelectFlavor::Standard, } ); @@ -11890,20 +10150,6 @@ fn parse_connect_by() { #[test] fn test_selective_aggregation() { - let testing_dialects = all_dialects_where(|d| d.supports_filter_during_aggregation()); - let expected_dialects: Vec> = vec![ - Box::new(PostgreSqlDialect {}), - Box::new(DatabricksDialect {}), - Box::new(HiveDialect {}), - Box::new(SQLiteDialect {}), - Box::new(DuckDbDialect {}), - Box::new(GenericDialect {}), - ]; - assert_eq!(testing_dialects.dialects.len(), expected_dialects.len()); - expected_dialects - .into_iter() - .for_each(|d| assert!(d.supports_filter_during_aggregation())); - let sql = concat!( "SELECT ", "ARRAY_AGG(name) FILTER (WHERE name IS NOT NULL), ", @@ -11911,11 +10157,12 @@ fn test_selective_aggregation() { "FROM region" ); assert_eq!( - testing_dialects.verified_only_select(sql).projection, + all_dialects_where(|d| d.supports_filter_during_aggregation()) + .verified_only_select(sql) + .projection, vec![ SelectItem::UnnamedExpr(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("ARRAY_AGG")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("ARRAY_AGG")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -11933,8 +10180,7 @@ fn test_selective_aggregation() { })), SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("ARRAY_AGG")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("ARRAY_AGG")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -11946,9 +10192,7 @@ fn test_selective_aggregation() { filter: Some(Box::new(Expr::Like { negated: false, expr: Box::new(Expr::Identifier(Ident::new("name"))), - pattern: Box::new(Expr::Value( - (Value::SingleQuotedString("a%".to_owned())).with_empty_span() - )), + pattern: Box::new(Expr::Value(Value::SingleQuotedString("a%".to_owned()))), escape_char: None, any: false, })), @@ -11989,51 +10233,21 @@ fn test_group_by_grouping_sets() { ); } -#[test] -fn test_xmltable() { - all_dialects() - 
.verified_only_select("SELECT * FROM XMLTABLE('/root' PASSING data COLUMNS element TEXT)"); - - // Minimal meaningful working example: returns a single row with a single column named y containing the value z - all_dialects().verified_only_select( - "SELECT y FROM XMLTABLE('/X' PASSING 'z' COLUMNS y TEXT)", - ); - - // Test using subqueries - all_dialects().verified_only_select("SELECT y FROM XMLTABLE((SELECT '/X') PASSING (SELECT CAST('z' AS xml)) COLUMNS y TEXT PATH (SELECT 'y'))"); - - // NOT NULL - all_dialects().verified_only_select( - "SELECT y FROM XMLTABLE('/X' PASSING '' COLUMNS y TEXT NOT NULL)", - ); - - all_dialects().verified_only_select("SELECT * FROM XMLTABLE('/root/row' PASSING xmldata COLUMNS id INT PATH '@id', name TEXT PATH 'name/text()', value FLOAT PATH 'value')"); - - all_dialects().verified_only_select("SELECT * FROM XMLTABLE('//ROWS/ROW' PASSING data COLUMNS row_num FOR ORDINALITY, id INT PATH '@id', name TEXT PATH 'NAME' DEFAULT 'unnamed')"); - - // Example from https://www.postgresql.org/docs/15/functions-xml.html#FUNCTIONS-XML-PROCESSING - all_dialects().verified_only_select( - "SELECT xmltable.* FROM xmldata, XMLTABLE('//ROWS/ROW' PASSING data COLUMNS id INT PATH '@id', ordinality FOR ORDINALITY, \"COUNTRY_NAME\" TEXT, country_id TEXT PATH 'COUNTRY_ID', size_sq_km FLOAT PATH 'SIZE[@unit = \"sq_km\"]', size_other TEXT PATH 'concat(SIZE[@unit!=\"sq_km\"], \" \", SIZE[@unit!=\"sq_km\"]/@unit)', premier_name TEXT PATH 'PREMIER_NAME' DEFAULT 'not specified')" - ); - - // Example from DB2 docs without explicit PASSING clause: https://www.ibm.com/docs/en/db2/12.1.0?topic=xquery-simple-column-name-passing-xmlexists-xmlquery-xmltable - all_dialects().verified_only_select( - "SELECT X.* FROM T1, XMLTABLE('$CUSTLIST/customers/customerinfo' COLUMNS \"Cid\" BIGINT PATH '@Cid', \"Info\" XML PATH 'document{.}', \"History\" XML PATH 'NULL') AS X" - ); - - // Example from PostgreSQL with XMLNAMESPACES - all_dialects().verified_only_select( - "SELECT xmltable.* FROM XMLTABLE(XMLNAMESPACES('http://example.com/myns' AS x, 'http://example.com/b' AS \"B\"), '/x:example/x:item' PASSING (SELECT data FROM xmldata) COLUMNS foo INT PATH '@foo', bar INT PATH '@B:bar')" - ); -} - #[test] fn test_match_recognize() { use MatchRecognizePattern::*; use MatchRecognizeSymbol::*; use RepetitionQuantifier::*; - let table = table_from_name(ObjectName::from(vec![Ident::new("my_table")])); + let table = TableFactor::Table { + name: ObjectName(vec![Ident::new("my_table")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }; fn check(options: &str, expect: TableFactor) { let select = all_dialects_where(|d| d.supports_match_recognize()).verified_only_select( @@ -12062,10 +10276,8 @@ fn test_match_recognize() { partition_by: vec![Expr::Identifier(Ident::new("company"))], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("price_date")), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, + asc: None, + nulls_first: None, with_fill: None, }], measures: vec![ @@ -12344,9 +10556,7 @@ fn test_select_wildcard_with_replace() { let expected = SelectItem::Wildcard(WildcardAdditionalOptions { opt_replace: Some(ReplaceSelectItem { items: vec![Box::new(ReplaceSelectElement { - expr: Expr::Value( - (Value::SingleQuotedString("widget".to_owned())).with_empty_span(), - ), + expr: Expr::Value(Value::SingleQuotedString("widget".to_owned())), column_name: Ident::new("item_name"), as_keyword: true, })], @@ -12365,13 +10575,13 @@ 
fn test_select_wildcard_with_replace() { expr: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("quantity"))), op: BinaryOperator::Divide, - right: Box::new(Expr::value(number("2"))), + right: Box::new(Expr::Value(number("2"))), }, column_name: Ident::new("quantity"), as_keyword: true, }), Box::new(ReplaceSelectElement { - expr: Expr::value(number("3")), + expr: Expr::Value(number("3")), column_name: Ident::new("order_id"), as_keyword: true, }), @@ -12405,37 +10615,6 @@ fn insert_into_with_parentheses() { Box::new(GenericDialect {}), ]); dialects.verified_stmt("INSERT INTO t1 (id, name) (SELECT t2.id, t2.name FROM t2)"); - dialects.verified_stmt("INSERT INTO t1 (SELECT t2.id, t2.name FROM t2)"); - dialects.verified_stmt(r#"INSERT INTO t1 ("select", name) (SELECT t2.name FROM t2)"#); -} - -#[test] -fn parse_odbc_scalar_function() { - let select = verified_only_select("SELECT {fn my_func(1, 2)}"); - let Expr::Function(Function { - name, - uses_odbc_syntax, - args, - .. - }) = expr_from_projection(only(&select.projection)) - else { - unreachable!("expected function") - }; - assert_eq!(name, &ObjectName::from(vec![Ident::new("my_func")])); - assert!(uses_odbc_syntax); - matches!(args, FunctionArguments::List(l) if l.args.len() == 2); - - verified_stmt("SELECT {fn fna()} AS foo, fnb(1)"); - - // Testing invalid SQL with any-one dialect is intentional. - // Depending on dialect flags the error message may be different. - let pg = TestedDialects::new(vec![Box::new(PostgreSqlDialect {})]); - assert_eq!( - pg.parse_sql_statements("SELECT {fn2 my_func()}") - .unwrap_err() - .to_string(), - "sql parser error: Expected: an expression, found: {" - ); } #[test] @@ -12447,22 +10626,20 @@ fn test_dictionary_syntax() { ); } - check("{}", Expr::Dictionary(vec![])); - check( "{'Alberta': 'Edmonton', 'Manitoba': 'Winnipeg'}", Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "Alberta"), - value: Box::new(Expr::Value( - (Value::SingleQuotedString("Edmonton".to_owned())).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "Edmonton".to_owned(), + ))), }, DictionaryField { key: Ident::with_quote('\'', "Manitoba"), - value: Box::new(Expr::Value( - (Value::SingleQuotedString("Winnipeg".to_owned())).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "Winnipeg".to_owned(), + ))), }, ]), ); @@ -12474,9 +10651,9 @@ fn test_dictionary_syntax() { key: Ident::with_quote('\'', "start"), value: Box::new(Expr::Cast { kind: CastKind::Cast, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2023-04-01".to_owned())).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "2023-04-01".to_owned(), + ))), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None, }), @@ -12485,9 +10662,9 @@ fn test_dictionary_syntax() { key: Ident::with_quote('\'', "end"), value: Box::new(Expr::Cast { kind: CastKind::Cast, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2023-04-05".to_owned())).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "2023-04-05".to_owned(), + ))), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None, }), @@ -12510,27 +10687,25 @@ fn test_map_syntax() { Expr::Map(Map { entries: vec![ MapEntry { - key: Box::new(Expr::Value( - (Value::SingleQuotedString("Alberta".to_owned())).with_empty_span(), - )), - value: Box::new(Expr::Value( - (Value::SingleQuotedString("Edmonton".to_owned())).with_empty_span(), - )), + key: 
Box::new(Expr::Value(Value::SingleQuotedString("Alberta".to_owned()))), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "Edmonton".to_owned(), + ))), }, MapEntry { - key: Box::new(Expr::Value( - (Value::SingleQuotedString("Manitoba".to_owned())).with_empty_span(), - )), - value: Box::new(Expr::Value( - (Value::SingleQuotedString("Winnipeg".to_owned())).with_empty_span(), - )), + key: Box::new(Expr::Value(Value::SingleQuotedString( + "Manitoba".to_owned(), + ))), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "Winnipeg".to_owned(), + ))), }, ], }), ); fn number_expr(s: &str) -> Expr { - Expr::value(number(s)) + Expr::Value(number(s)) } check( @@ -12558,14 +10733,14 @@ fn test_map_syntax() { elem: vec![number_expr("1"), number_expr("2"), number_expr("3")], named: false, })), - value: Box::new(Expr::value(number("10.0"))), + value: Box::new(Expr::Value(number("10.0"))), }, MapEntry { key: Box::new(Expr::Array(Array { elem: vec![number_expr("4"), number_expr("5"), number_expr("6")], named: false, })), - value: Box::new(Expr::value(number("20.0"))), + value: Box::new(Expr::Value(number("20.0"))), }, ], }), @@ -12573,26 +10748,22 @@ fn test_map_syntax() { check( "MAP {'a': 10, 'b': 20}['a']", - Expr::CompoundFieldAccess { - root: Box::new(Expr::Map(Map { + Expr::Subscript { + expr: Box::new(Expr::Map(Map { entries: vec![ MapEntry { - key: Box::new(Expr::Value( - (Value::SingleQuotedString("a".to_owned())).with_empty_span(), - )), + key: Box::new(Expr::Value(Value::SingleQuotedString("a".to_owned()))), value: Box::new(number_expr("10")), }, MapEntry { - key: Box::new(Expr::Value( - (Value::SingleQuotedString("b".to_owned())).with_empty_span(), - )), + key: Box::new(Expr::Value(Value::SingleQuotedString("b".to_owned()))), value: Box::new(number_expr("20")), }, ], })), - access_chain: vec![AccessExpr::Subscript(Subscript::Index { - index: Expr::Value((Value::SingleQuotedString("a".to_owned())).with_empty_span()), - })], + subscript: Box::new(Subscript::Index { + index: Expr::Value(Value::SingleQuotedString("a".to_owned())), + }), }, ); @@ -12686,12 +10857,27 @@ fn parse_select_wildcard_with_except() { ); } +#[test] +fn parse_auto_increment_too_large() { + let dialect = GenericDialect {}; + let u64_max = u64::MAX; + let sql = + format!("CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) AUTO_INCREMENT=1{u64_max}"); + + let res = Parser::new(&dialect) + .try_with_sql(&sql) + .expect("tokenize to work") + .parse_statements(); + + assert!(res.is_err(), "{res:?}"); +} + #[test] fn test_group_by_nothing() { let Select { group_by, .. } = all_dialects_where(|d| d.supports_group_by_expr()) .verified_only_select("SELECT count(1) FROM t GROUP BY ()"); { - assert_eq!( + std::assert_eq!( GroupByExpr::Expressions(vec![Expr::Tuple(vec![])], vec![]), group_by ); @@ -12700,7 +10886,7 @@ fn test_group_by_nothing() { let Select { group_by, .. 
} = all_dialects_where(|d| d.supports_group_by_expr()) .verified_only_select("SELECT name, count(1) FROM t GROUP BY name, ()"); { - assert_eq!( + std::assert_eq!( GroupByExpr::Expressions( vec![ Identifier(Ident::new("name".to_string())), @@ -12716,76 +10902,26 @@ fn test_group_by_nothing() { #[test] fn test_extract_seconds_ok() { let dialects = all_dialects_where(|d| d.allow_extract_custom()); - let stmt = dialects.verified_expr("EXTRACT(SECONDS FROM '2 seconds'::INTERVAL)"); + let stmt = dialects.verified_expr("EXTRACT(seconds FROM '2 seconds'::INTERVAL)"); assert_eq!( stmt, Expr::Extract { - field: Seconds, + field: DateTimeField::Custom(Ident { + value: "seconds".to_string(), + quote_style: None, + }), syntax: ExtractSyntax::From, expr: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "2 seconds".to_string() + ))), data_type: DataType::Interval, format: None, }), } - ); - - let actual_ast = dialects - .parse_sql_statements("SELECT EXTRACT(seconds FROM '2 seconds'::INTERVAL)") - .unwrap(); - - let expected_ast = vec![Statement::Query(Box::new(Query { - with: None, - body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, - top: None, - top_before_distinct: false, - projection: vec![UnnamedExpr(Expr::Extract { - field: Seconds, - syntax: ExtractSyntax::From, - expr: Box::new(Expr::Cast { - kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span(), - )), - data_type: DataType::Interval, - format: None, - }), - })], - exclude: None, - into: None, - from: vec![], - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - qualify: None, - window_before_qualify: false, - value_table_mode: None, - connect_by: None, - flavor: SelectFlavor::Standard, - }))), - order_by: None, - limit_clause: None, - fetch: None, - locks: vec![], - for_clause: None, - settings: None, - format_clause: None, - pipe_operators: vec![], - }))]; - - assert_eq!(actual_ast, expected_ast); + ) } #[test] @@ -12799,14 +10935,13 @@ fn test_extract_seconds_single_quote_ok() { field: DateTimeField::Custom(Ident { value: "seconds".to_string(), quote_style: Some('\''), - span: Span::empty(), }), syntax: ExtractSyntax::From, expr: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "2 seconds".to_string() + ))), data_type: DataType::Interval, format: None, }), @@ -12814,6 +10949,17 @@ fn test_extract_seconds_single_quote_ok() { ) } +#[test] +fn test_extract_seconds_err() { + let sql = "SELECT EXTRACT(seconds FROM '2 seconds'::INTERVAL)"; + let dialects = all_dialects_except(|d| d.allow_extract_custom()); + let err = dialects.parse_sql_statements(sql).unwrap_err(); + assert_eq!( + err.to_string(), + "sql parser error: Expected: date/time field, found: seconds" + ); +} + #[test] fn test_extract_seconds_single_quote_err() { let sql = r#"SELECT EXTRACT('seconds' FROM '2 seconds'::INTERVAL)"#; @@ -12857,11 +11003,11 @@ fn parse_explain_with_option_list() { Some(vec![ UtilityOption { name: Ident::new("ANALYZE"), - arg: 
Some(Expr::Value((Value::Boolean(false)).with_empty_span())), + arg: Some(Expr::Value(Value::Boolean(false))), }, UtilityOption { name: Ident::new("VERBOSE"), - arg: Some(Expr::Value((Value::Boolean(true)).with_empty_span())), + arg: Some(Expr::Value(Value::Boolean(true))), }, ]), ); @@ -12897,9 +11043,7 @@ fn parse_explain_with_option_list() { }, UtilityOption { name: Ident::new("FORMAT2"), - arg: Some(Expr::Value( - (Value::SingleQuotedString("JSON".to_string())).with_empty_span(), - )), + arg: Some(Expr::Value(Value::SingleQuotedString("JSON".to_string()))), }, UtilityOption { name: Ident::new("FORMAT3"), @@ -12921,26 +11065,20 @@ fn parse_explain_with_option_list() { Some(vec![ UtilityOption { name: Ident::new("NUM1"), - arg: Some(Expr::Value( - (Value::Number("10".parse().unwrap(), false)).with_empty_span(), - )), + arg: Some(Expr::Value(Value::Number("10".parse().unwrap(), false))), }, UtilityOption { name: Ident::new("NUM2"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::Value( - (Value::Number("10.1".parse().unwrap(), false)).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::Number("10.1".parse().unwrap(), false))), }), }, UtilityOption { name: Ident::new("NUM3"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::Value( - (Value::Number("10.2".parse().unwrap(), false)).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::Number("10.2".parse().unwrap(), false))), }), }, ]), @@ -12953,7 +11091,7 @@ fn parse_explain_with_option_list() { }, UtilityOption { name: Ident::new("VERBOSE"), - arg: Some(Expr::Value((Value::Boolean(true)).with_empty_span())), + arg: Some(Expr::Value(Value::Boolean(true))), }, UtilityOption { name: Ident::new("WAL"), @@ -12967,13 +11105,11 @@ fn parse_explain_with_option_list() { name: Ident::new("USER_DEF_NUM"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::Value( - (Value::Number("100.1".parse().unwrap(), false)).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::Number("100.1".parse().unwrap(), false))), }), }, ]; - run_explain_analyze( + run_explain_analyze ( all_dialects_where(|d| d.supports_explain_with_utility_options()), "EXPLAIN (ANALYZE, VERBOSE true, WAL OFF, FORMAT YAML, USER_DEF_NUM -100.1) SELECT sqrt(id) FROM foo", false, @@ -12985,11 +11121,13 @@ fn parse_explain_with_option_list() { #[test] fn test_create_policy() { - let sql: &str = "CREATE POLICY my_policy ON my_table \ - AS PERMISSIVE FOR SELECT \ - TO my_role, CURRENT_USER \ - USING (c0 = 1) \ - WITH CHECK (1 = 1)"; + let sql = concat!( + "CREATE POLICY my_policy ON my_table ", + "AS PERMISSIVE FOR SELECT ", + "TO my_role, CURRENT_USER ", + "USING (c0 = 1) ", + "WITH CHECK (true)" + ); match all_dialects().verified_stmt(sql) { Statement::CreatePolicy { @@ -13014,23 +11152,10 @@ fn test_create_policy() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("c0"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), - }) - ); - assert_eq!( - with_check, - Some(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), }) ); + assert_eq!(with_check, Some(Expr::Value(Value::Boolean(true)))); } _ => unreachable!(), } @@ -13041,7 +11166,7 @@ fn 
test_create_policy() { "AS PERMISSIVE FOR SELECT ", "TO my_role, CURRENT_USER ", "USING (c0 IN (SELECT column FROM t0)) ", - "WITH CHECK (1 = 1)" + "WITH CHECK (true)" )); // omit AS / FOR / TO / USING / WITH CHECK clauses is allowed all_dialects().verified_stmt("CREATE POLICY my_policy ON my_table"); @@ -13104,12 +11229,12 @@ fn test_drop_policy() { if_exists, name, table_name, - drop_behavior, + option, } => { assert_eq!(if_exists, true); assert_eq!(name.to_string(), "my_policy"); assert_eq!(table_name.to_string(), "my_table"); - assert_eq!(drop_behavior, Some(DropBehavior::Restrict)); + assert_eq!(option, Some(ReferentialAction::Restrict)); } _ => unreachable!(), } @@ -13218,183 +11343,6 @@ fn test_alter_policy() { ); } -#[test] -fn test_create_connector() { - let sql = "CREATE CONNECTOR my_connector \ - TYPE 'jdbc' \ - URL 'jdbc:mysql://localhost:3306/mydb' \ - WITH DCPROPERTIES('user' = 'root', 'password' = 'password')"; - let dialects = all_dialects(); - match dialects.verified_stmt(sql) { - Statement::CreateConnector(CreateConnector { - name, - connector_type, - url, - with_dcproperties, - .. - }) => { - assert_eq!(name.to_string(), "my_connector"); - assert_eq!(connector_type, Some("jdbc".to_string())); - assert_eq!(url, Some("jdbc:mysql://localhost:3306/mydb".to_string())); - assert_eq!( - with_dcproperties, - Some(vec![ - SqlOption::KeyValue { - key: Ident::with_quote('\'', "user"), - value: Expr::Value( - (Value::SingleQuotedString("root".to_string())).with_empty_span() - ) - }, - SqlOption::KeyValue { - key: Ident::with_quote('\'', "password"), - value: Expr::Value( - (Value::SingleQuotedString("password".to_string())).with_empty_span() - ) - } - ]) - ); - } - _ => unreachable!(), - } - - // omit IF NOT EXISTS/TYPE/URL/COMMENT/WITH DCPROPERTIES clauses is allowed - dialects.verified_stmt("CREATE CONNECTOR my_connector"); - - // missing connector name - assert_eq!( - dialects - .parse_sql_statements("CREATE CONNECTOR") - .unwrap_err() - .to_string(), - "sql parser error: Expected: identifier, found: EOF" - ); -} - -#[test] -fn test_drop_connector() { - let dialects = all_dialects(); - match dialects.verified_stmt("DROP CONNECTOR IF EXISTS my_connector") { - Statement::DropConnector { if_exists, name } => { - assert_eq!(if_exists, true); - assert_eq!(name.to_string(), "my_connector"); - } - _ => unreachable!(), - } - - // omit IF EXISTS is allowed - dialects.verified_stmt("DROP CONNECTOR my_connector"); - - // missing connector name - assert_eq!( - dialects - .parse_sql_statements("DROP CONNECTOR") - .unwrap_err() - .to_string(), - "sql parser error: Expected: identifier, found: EOF" - ); -} - -#[test] -fn test_alter_connector() { - let dialects = all_dialects(); - match dialects.verified_stmt( - "ALTER CONNECTOR my_connector SET DCPROPERTIES('user' = 'root', 'password' = 'password')", - ) { - Statement::AlterConnector { - name, - properties, - url, - owner, - } => { - assert_eq!(name.to_string(), "my_connector"); - assert_eq!( - properties, - Some(vec![ - SqlOption::KeyValue { - key: Ident::with_quote('\'', "user"), - value: Expr::Value( - (Value::SingleQuotedString("root".to_string())).with_empty_span() - ) - }, - SqlOption::KeyValue { - key: Ident::with_quote('\'', "password"), - value: Expr::Value( - (Value::SingleQuotedString("password".to_string())).with_empty_span() - ) - } - ]) - ); - assert_eq!(url, None); - assert_eq!(owner, None); - } - _ => unreachable!(), - } - - match dialects - .verified_stmt("ALTER CONNECTOR my_connector SET URL 
'jdbc:mysql://localhost:3306/mydb'") - { - Statement::AlterConnector { - name, - properties, - url, - owner, - } => { - assert_eq!(name.to_string(), "my_connector"); - assert_eq!(properties, None); - assert_eq!(url, Some("jdbc:mysql://localhost:3306/mydb".to_string())); - assert_eq!(owner, None); - } - _ => unreachable!(), - } - - match dialects.verified_stmt("ALTER CONNECTOR my_connector SET OWNER USER 'root'") { - Statement::AlterConnector { - name, - properties, - url, - owner, - } => { - assert_eq!(name.to_string(), "my_connector"); - assert_eq!(properties, None); - assert_eq!(url, None); - assert_eq!( - owner, - Some(AlterConnectorOwner::User(Ident::with_quote('\'', "root"))) - ); - } - _ => unreachable!(), - } - - match dialects.verified_stmt("ALTER CONNECTOR my_connector SET OWNER ROLE 'admin'") { - Statement::AlterConnector { - name, - properties, - url, - owner, - } => { - assert_eq!(name.to_string(), "my_connector"); - assert_eq!(properties, None); - assert_eq!(url, None); - assert_eq!( - owner, - Some(AlterConnectorOwner::Role(Ident::with_quote('\'', "admin"))) - ); - } - _ => unreachable!(), - } - - // Wrong option name - assert_eq!( - dialects - .parse_sql_statements( - "ALTER CONNECTOR my_connector SET WRONG 'jdbc:mysql://localhost:3306/mydb'" - ) - .unwrap_err() - .to_string(), - "sql parser error: Expected: end of statement, found: WRONG" - ); -} - #[test] fn test_select_where_with_like_or_ilike_any() { verified_stmt(r#"SELECT * FROM x WHERE a ILIKE ANY '%abc%'"#); @@ -13439,128 +11387,30 @@ fn test_try_convert() { dialects.verified_expr("TRY_CONVERT('foo', VARCHAR(MAX))"); } -#[test] -fn parse_method_select() { - let _ = verified_only_select( - "SELECT LEFT('abc', 1).value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)') AS T", - ); - let _ = verified_only_select("SELECT STUFF((SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS T"); - let _ = verified_only_select("SELECT CAST(column AS XML).value('.', 'NVARCHAR(MAX)') AS T"); - - // `CONVERT` support - let dialects = - all_dialects_where(|d| d.supports_try_convert() && d.convert_type_before_value()); - let _ = dialects.verified_only_select("SELECT CONVERT(XML, 'abc').value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)') AS T"); -} - -#[test] -fn parse_method_expr() { - let expr = - verified_expr("LEFT('abc', 1).value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)')"); - match expr { - Expr::CompoundFieldAccess { root, access_chain } => { - assert!(matches!(*root, Expr::Function(_))); - assert!(matches!( - access_chain[..], - [ - AccessExpr::Dot(Expr::Function(_)), - AccessExpr::Dot(Expr::Function(_)) - ] - )); - } - _ => unreachable!(), - } - - let expr = verified_expr( - "(SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)')", - ); - match expr { - Expr::CompoundFieldAccess { root, access_chain } => { - assert!(matches!(*root, Expr::Subquery(_))); - assert!(matches!( - access_chain[..], - [AccessExpr::Dot(Expr::Function(_))] - )); - } - _ => unreachable!(), - } - let expr = verified_expr("CAST(column AS XML).value('.', 'NVARCHAR(MAX)')"); - match expr { - Expr::CompoundFieldAccess { root, access_chain } => { - assert!(matches!(*root, Expr::Cast { .. 
})); - assert!(matches!( - access_chain[..], - [AccessExpr::Dot(Expr::Function(_))] - )); - } - _ => unreachable!(), - } - - // `CONVERT` support - let dialects = - all_dialects_where(|d| d.supports_try_convert() && d.convert_type_before_value()); - let expr = dialects.verified_expr( - "CONVERT(XML, 'abc').value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)')", - ); - match expr { - Expr::CompoundFieldAccess { root, access_chain } => { - assert!(matches!(*root, Expr::Convert { .. })); - assert!(matches!( - access_chain[..], - [ - AccessExpr::Dot(Expr::Function(_)), - AccessExpr::Dot(Expr::Function(_)) - ] - )); - } - _ => unreachable!(), - } -} - #[test] fn test_show_dbs_schemas_tables_views() { - // These statements are parsed the same by all dialects - let stmts = vec![ - "SHOW DATABASES", - "SHOW SCHEMAS", - "SHOW TABLES", - "SHOW VIEWS", - "SHOW TABLES IN db1", - "SHOW VIEWS FROM db1", - "SHOW MATERIALIZED VIEWS", - "SHOW MATERIALIZED VIEWS IN db1", - "SHOW MATERIALIZED VIEWS FROM db1", - ]; - for stmt in stmts { - verified_stmt(stmt); - } - - // These statements are parsed the same by all dialects - // except for how the parser interprets the location of - // LIKE option (infix/suffix) - let stmts = vec!["SHOW DATABASES LIKE '%abc'", "SHOW SCHEMAS LIKE '%abc'"]; - for stmt in stmts { - all_dialects_where(|d| d.supports_show_like_before_in()).verified_stmt(stmt); - all_dialects_where(|d| !d.supports_show_like_before_in()).verified_stmt(stmt); - } - - // These statements are only parsed by dialects that - // support the LIKE option in the suffix - let stmts = vec![ - "SHOW TABLES IN db1 'abc'", - "SHOW VIEWS IN db1 'abc'", - "SHOW VIEWS FROM db1 'abc'", - "SHOW MATERIALIZED VIEWS IN db1 'abc'", - "SHOW MATERIALIZED VIEWS FROM db1 'abc'", - ]; - for stmt in stmts { - all_dialects_where(|d| !d.supports_show_like_before_in()).verified_stmt(stmt); - } + verified_stmt("SHOW DATABASES"); + verified_stmt("SHOW DATABASES LIKE '%abc'"); + verified_stmt("SHOW SCHEMAS"); + verified_stmt("SHOW SCHEMAS LIKE '%abc'"); + verified_stmt("SHOW TABLES"); + verified_stmt("SHOW TABLES IN db1"); + verified_stmt("SHOW TABLES IN db1 'abc'"); + verified_stmt("SHOW VIEWS"); + verified_stmt("SHOW VIEWS IN db1"); + verified_stmt("SHOW VIEWS IN db1 'abc'"); + verified_stmt("SHOW VIEWS FROM db1"); + verified_stmt("SHOW VIEWS FROM db1 'abc'"); + verified_stmt("SHOW MATERIALIZED VIEWS"); + verified_stmt("SHOW MATERIALIZED VIEWS IN db1"); + verified_stmt("SHOW MATERIALIZED VIEWS IN db1 'abc'"); + verified_stmt("SHOW MATERIALIZED VIEWS FROM db1"); + verified_stmt("SHOW MATERIALIZED VIEWS FROM db1 'abc'"); } #[test] fn parse_listen_channel() { - let dialects = all_dialects_where(|d| d.supports_listen_notify()); + let dialects = all_dialects_where(|d| d.supports_listen()); match dialects.verified_stmt("LISTEN test1") { Statement::LISTEN { channel } => { @@ -13574,7 +11424,7 @@ fn parse_listen_channel() { ParserError::ParserError("Expected: identifier, found: *".to_string()) ); - let dialects = all_dialects_where(|d| !d.supports_listen_notify()); + let dialects = all_dialects_where(|d| !d.supports_listen()); assert_eq!( dialects.parse_sql_statements("LISTEN test1").unwrap_err(), @@ -13582,40 +11432,9 @@ fn parse_listen_channel() { ); } -#[test] -fn parse_unlisten_channel() { - let dialects = all_dialects_where(|d| d.supports_listen_notify()); - - match dialects.verified_stmt("UNLISTEN test1") { - Statement::UNLISTEN { channel } => { - assert_eq!(Ident::new("test1"), channel); - } - _ => unreachable!(), - }; - - match 
dialects.verified_stmt("UNLISTEN *") { - Statement::UNLISTEN { channel } => { - assert_eq!(Ident::new("*"), channel); - } - _ => unreachable!(), - }; - - assert_eq!( - dialects.parse_sql_statements("UNLISTEN +").unwrap_err(), - ParserError::ParserError("Expected: wildcard or identifier, found: +".to_string()) - ); - - let dialects = all_dialects_where(|d| !d.supports_listen_notify()); - - assert_eq!( - dialects.parse_sql_statements("UNLISTEN test1").unwrap_err(), - ParserError::ParserError("Expected: an SQL statement, found: UNLISTEN".to_string()) - ); -} - #[test] fn parse_notify_channel() { - let dialects = all_dialects_where(|d| d.supports_listen_notify()); + let dialects = all_dialects_where(|d| d.supports_notify()); match dialects.verified_stmt("NOTIFY test1") { Statement::NOTIFY { channel, payload } => { @@ -13651,268 +11470,20 @@ fn parse_notify_channel() { "NOTIFY test1", "NOTIFY test1, 'this is a test notification'", ]; - let dialects = all_dialects_where(|d| !d.supports_listen_notify()); + let dialects = all_dialects_where(|d| !d.supports_notify()); for &sql in &sql_statements { assert_eq!( dialects.parse_sql_statements(sql).unwrap_err(), ParserError::ParserError("Expected: an SQL statement, found: NOTIFY".to_string()) ); + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("Expected: an SQL statement, found: NOTIFY".to_string()) + ); } } -#[test] -fn parse_load_data() { - let dialects = all_dialects_where(|d| d.supports_load_data()); - let only_supports_load_extension_dialects = - all_dialects_where(|d| !d.supports_load_data() && d.supports_load_extension()); - let not_supports_load_dialects = - all_dialects_where(|d| !d.supports_load_data() && !d.supports_load_extension()); - - let sql = "LOAD DATA INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; - match dialects.verified_stmt(sql) { - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - assert_eq!(false, local); - assert_eq!("/local/path/to/data.txt", inpath); - assert_eq!(false, overwrite); - assert_eq!( - ObjectName::from(vec![Ident::new("test"), Ident::new("my_table")]), - table_name - ); - assert_eq!(None, partitioned); - assert_eq!(None, table_format); - } - _ => unreachable!(), - }; - - // with OVERWRITE keyword - let sql = "LOAD DATA INPATH '/local/path/to/data.txt' OVERWRITE INTO TABLE my_table"; - match dialects.verified_stmt(sql) { - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - assert_eq!(false, local); - assert_eq!("/local/path/to/data.txt", inpath); - assert_eq!(true, overwrite); - assert_eq!(ObjectName::from(vec![Ident::new("my_table")]), table_name); - assert_eq!(None, partitioned); - assert_eq!(None, table_format); - } - _ => unreachable!(), - }; - - assert_eq!( - only_supports_load_extension_dialects - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError("Expected: end of statement, found: INPATH".to_string()) - ); - assert_eq!( - not_supports_load_dialects - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError( - "Expected: `DATA` or an extension name after `LOAD`, found: INPATH".to_string() - ) - ); - - // with LOCAL keyword - let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; - match dialects.verified_stmt(sql) { - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - assert_eq!(true, local); - 
assert_eq!("/local/path/to/data.txt", inpath); - assert_eq!(false, overwrite); - assert_eq!( - ObjectName::from(vec![Ident::new("test"), Ident::new("my_table")]), - table_name - ); - assert_eq!(None, partitioned); - assert_eq!(None, table_format); - } - _ => unreachable!(), - }; - - assert_eq!( - only_supports_load_extension_dialects - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError("Expected: end of statement, found: LOCAL".to_string()) - ); - assert_eq!( - not_supports_load_dialects - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError( - "Expected: `DATA` or an extension name after `LOAD`, found: LOCAL".to_string() - ) - ); - - // with PARTITION clause - let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' INTO TABLE my_table PARTITION (year = 2024, month = 11)"; - match dialects.verified_stmt(sql) { - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - assert_eq!(true, local); - assert_eq!("/local/path/to/data.txt", inpath); - assert_eq!(false, overwrite); - assert_eq!(ObjectName::from(vec![Ident::new("my_table")]), table_name); - assert_eq!( - Some(vec![ - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("year"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("2024".parse().unwrap(), false)).with_empty_span() - )), - }, - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("month"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("11".parse().unwrap(), false)).with_empty_span() - )), - } - ]), - partitioned - ); - assert_eq!(None, table_format); - } - _ => unreachable!(), - }; - - // with PARTITION clause - let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' OVERWRITE INTO TABLE good.my_table PARTITION (year = 2024, month = 11) INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'"; - match dialects.verified_stmt(sql) { - Statement::LoadData { - local, - inpath, - overwrite, - table_name, - partitioned, - table_format, - } => { - assert_eq!(true, local); - assert_eq!("/local/path/to/data.txt", inpath); - assert_eq!(true, overwrite); - assert_eq!( - ObjectName::from(vec![Ident::new("good"), Ident::new("my_table")]), - table_name - ); - assert_eq!( - Some(vec![ - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("year"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("2024".parse().unwrap(), false)).with_empty_span() - )), - }, - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("month"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("11".parse().unwrap(), false)).with_empty_span() - )), - } - ]), - partitioned - ); - assert_eq!( - Some(HiveLoadDataFormat { - serde: Expr::Value( - (Value::SingleQuotedString( - "org.apache.hadoop.hive.serde2.OpenCSVSerde".to_string() - )) - .with_empty_span() - ), - input_format: Expr::Value( - (Value::SingleQuotedString( - "org.apache.hadoop.mapred.TextInputFormat".to_string() - )) - .with_empty_span() - ) - }), - table_format - ); - } - _ => unreachable!(), - }; - - // negative test case - let sql = "LOAD DATA2 LOCAL INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError( - "Expected: `DATA` or an extension name after `LOAD`, found: DATA2".to_string() - ) - ); -} - -#[test] -fn test_load_extension() { - let dialects = 
all_dialects_where(|d| d.supports_load_extension()); - let not_supports_load_extension_dialects = all_dialects_where(|d| !d.supports_load_extension()); - let sql = "LOAD my_extension"; - - match dialects.verified_stmt(sql) { - Statement::Load { extension_name } => { - assert_eq!(Ident::new("my_extension"), extension_name); - } - _ => unreachable!(), - }; - - assert_eq!( - not_supports_load_extension_dialects - .parse_sql_statements(sql) - .unwrap_err(), - ParserError::ParserError( - "Expected: `DATA` or an extension name after `LOAD`, found: my_extension".to_string() - ) - ); - - let sql = "LOAD 'filename'"; - - match dialects.verified_stmt(sql) { - Statement::Load { extension_name } => { - assert_eq!( - Ident { - value: "filename".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }, - extension_name - ); - } - _ => unreachable!(), - }; -} - #[test] fn test_select_top() { let dialects = all_dialects_where(|d| d.supports_top_before_distinct()); @@ -13922,2214 +11493,3 @@ fn test_select_top() { dialects.verified_stmt("SELECT TOP 3 DISTINCT * FROM tbl"); dialects.verified_stmt("SELECT TOP 3 DISTINCT a, b, c FROM tbl"); } - -#[test] -fn parse_bang_not() { - let dialects = all_dialects_where(|d| d.supports_bang_not_operator()); - let sql = "SELECT !a, !(b > 3)"; - let Select { projection, .. } = dialects.verified_only_select(sql); - - for (i, expr) in [ - Box::new(Expr::Identifier(Ident::new("a"))), - Box::new(Expr::Nested(Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("b"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value( - Value::Number("3".parse().unwrap(), false).with_empty_span(), - )), - }))), - ] - .into_iter() - .enumerate() - { - assert_eq!( - SelectItem::UnnamedExpr(Expr::UnaryOp { - op: UnaryOperator::BangNot, - expr - }), - projection[i] - ) - } - - let sql_statements = ["SELECT a!", "SELECT a ! b", "SELECT a ! as b"]; - - for &sql in &sql_statements { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) - ); - } - - let sql_statements = ["SELECT !a", "SELECT !a b", "SELECT !a as b"]; - let dialects = all_dialects_where(|d| !d.supports_bang_not_operator()); - - for &sql in &sql_statements { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("Expected: an expression, found: !".to_string()) - ); - } -} - -#[test] -fn parse_factorial_operator() { - let dialects = all_dialects_where(|d| d.supports_factorial_operator()); - let sql = "SELECT a!, (b + c)!"; - let Select { projection, .. } = dialects.verified_only_select(sql); - - for (i, expr) in [ - Box::new(Expr::Identifier(Ident::new("a"))), - Box::new(Expr::Nested(Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("b"))), - op: BinaryOperator::Plus, - right: Box::new(Expr::Identifier(Ident::new("c"))), - }))), - ] - .into_iter() - .enumerate() - { - assert_eq!( - SelectItem::UnnamedExpr(Expr::UnaryOp { - op: UnaryOperator::PGPostfixFactorial, - expr - }), - projection[i] - ) - } - - let sql_statements = ["SELECT !a", "SELECT !a b", "SELECT !a as b"]; - - for &sql in &sql_statements { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("Expected: an expression, found: !".to_string()) - ); - } - - let sql_statements = ["SELECT a!", "SELECT a ! b", "SELECT a ! 
as b"]; - - // Due to the exclamation mark, which is both part of the `bang not` operator - // and the `factorial` operator, additional filtering not supports - // `bang not` operator is required here. - let dialects = - all_dialects_where(|d| !d.supports_factorial_operator() && !d.supports_bang_not_operator()); - - for &sql in &sql_statements { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) - ); - } - - // Due to the exclamation mark, which is both part of the `bang not` operator - // and the `factorial` operator, additional filtering supports - // `bang not` operator is required here. - let dialects = - all_dialects_where(|d| !d.supports_factorial_operator() && d.supports_bang_not_operator()); - - for &sql in &sql_statements { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) - ); - } -} - -#[test] -fn parse_comments() { - match all_dialects_where(|d| d.supports_comment_on()) - .verified_stmt("COMMENT ON COLUMN tab.name IS 'comment'") - { - Statement::Comment { - object_type, - object_name, - comment: Some(comment), - if_exists, - } => { - assert_eq!("comment", comment); - assert_eq!("tab.name", object_name.to_string()); - assert_eq!(CommentObject::Column, object_type); - assert!(!if_exists); - } - _ => unreachable!(), - } - - let object_types = [ - ("COLUMN", CommentObject::Column), - ("EXTENSION", CommentObject::Extension), - ("TABLE", CommentObject::Table), - ("SCHEMA", CommentObject::Schema), - ("DATABASE", CommentObject::Database), - ("USER", CommentObject::User), - ("ROLE", CommentObject::Role), - ]; - for (keyword, expected_object_type) in object_types.iter() { - match all_dialects_where(|d| d.supports_comment_on()) - .verified_stmt(format!("COMMENT IF EXISTS ON {keyword} db.t0 IS 'comment'").as_str()) - { - Statement::Comment { - object_type, - object_name, - comment: Some(comment), - if_exists, - } => { - assert_eq!("comment", comment); - assert_eq!("db.t0", object_name.to_string()); - assert_eq!(*expected_object_type, object_type); - assert!(if_exists); - } - _ => unreachable!(), - } - } - - match all_dialects_where(|d| d.supports_comment_on()) - .verified_stmt("COMMENT IF EXISTS ON TABLE public.tab IS NULL") - { - Statement::Comment { - object_type, - object_name, - comment: None, - if_exists, - } => { - assert_eq!("public.tab", object_name.to_string()); - assert_eq!(CommentObject::Table, object_type); - assert!(if_exists); - } - _ => unreachable!(), - } - - // missing IS statement - assert_eq!( - all_dialects_where(|d| d.supports_comment_on()) - .parse_sql_statements("COMMENT ON TABLE t0") - .unwrap_err(), - ParserError::ParserError("Expected: IS, found: EOF".to_string()) - ); - - // missing comment literal - assert_eq!( - all_dialects_where(|d| d.supports_comment_on()) - .parse_sql_statements("COMMENT ON TABLE t0 IS") - .unwrap_err(), - ParserError::ParserError("Expected: literal string, found: EOF".to_string()) - ); - - // unknown object type - assert_eq!( - all_dialects_where(|d| d.supports_comment_on()) - .parse_sql_statements("COMMENT ON UNKNOWN t0 IS 'comment'") - .unwrap_err(), - ParserError::ParserError("Expected: comment object_type, found: UNKNOWN".to_string()) - ); -} - -#[test] -fn parse_create_table_select() { - let dialects = all_dialects_where(|d| d.supports_create_table_select()); - let sql_1 = r#"CREATE TABLE foo (baz INT) SELECT bar"#; - let expected = 
r#"CREATE TABLE foo (baz INT) AS SELECT bar"#; - let _ = dialects.one_statement_parses_to(sql_1, expected); - - let sql_2 = r#"CREATE TABLE foo (baz INT, name STRING) SELECT bar, oth_name FROM test.table_a"#; - let expected = - r#"CREATE TABLE foo (baz INT, name STRING) AS SELECT bar, oth_name FROM test.table_a"#; - let _ = dialects.one_statement_parses_to(sql_2, expected); - - let dialects = all_dialects_where(|d| !d.supports_create_table_select()); - for sql in [sql_1, sql_2] { - assert_eq!( - dialects.parse_sql_statements(sql).unwrap_err(), - ParserError::ParserError("Expected: end of statement, found: SELECT".to_string()) - ); - } -} - -#[test] -fn test_reserved_keywords_for_identifiers() { - let dialects = all_dialects_where(|d| d.is_reserved_for_identifier(Keyword::INTERVAL)); - // Dialects that reserve the word INTERVAL will not allow it as an unquoted identifier - let sql = "SELECT MAX(interval) FROM tbl"; - assert_eq!( - dialects.parse_sql_statements(sql), - Err(ParserError::ParserError( - "Expected: an expression, found: )".to_string() - )) - ); - - // Dialects that do not reserve the word INTERVAL will allow it - let dialects = all_dialects_where(|d| !d.is_reserved_for_identifier(Keyword::INTERVAL)); - let sql = "SELECT MAX(interval) FROM tbl"; - dialects.parse_sql_statements(sql).unwrap(); -} - -#[test] -fn parse_create_table_with_bit_types() { - let sql = "CREATE TABLE t (a BIT, b BIT VARYING, c BIT(42), d BIT VARYING(43))"; - match verified_stmt(sql) { - Statement::CreateTable(CreateTable { columns, .. }) => { - assert_eq!(columns.len(), 4); - assert_eq!(columns[0].data_type, DataType::Bit(None)); - assert_eq!(columns[0].to_string(), "a BIT"); - assert_eq!(columns[1].data_type, DataType::BitVarying(None)); - assert_eq!(columns[1].to_string(), "b BIT VARYING"); - assert_eq!(columns[2].data_type, DataType::Bit(Some(42))); - assert_eq!(columns[2].to_string(), "c BIT(42)"); - assert_eq!(columns[3].data_type, DataType::BitVarying(Some(43))); - assert_eq!(columns[3].to_string(), "d BIT VARYING(43)"); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_composite_access_expr() { - assert_eq!( - verified_expr("f(a).b"), - Expr::CompoundFieldAccess { - root: Box::new(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("f")]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( - Expr::Identifier(Ident::new("a")) - ))], - clauses: vec![], - }), - null_treatment: None, - filter: None, - over: None, - within_group: vec![] - })), - access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("b")))] - } - ); - - // Nested Composite Access - assert_eq!( - verified_expr("f(a).b.c"), - Expr::CompoundFieldAccess { - root: Box::new(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("f")]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( - Expr::Identifier(Ident::new("a")) - ))], - clauses: vec![], - }), - null_treatment: None, - filter: None, - over: None, - within_group: vec![] - })), - access_chain: vec![ - AccessExpr::Dot(Expr::Identifier(Ident::new("b"))), - AccessExpr::Dot(Expr::Identifier(Ident::new("c"))), - ] - } - ); - - // Composite Access in Select and Where Clauses - let stmt = verified_only_select("SELECT f(a).b FROM t WHERE f(a).b IS NOT 
NULL"); - let expr = Expr::CompoundFieldAccess { - root: Box::new(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("f")]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( - Expr::Identifier(Ident::new("a")), - ))], - clauses: vec![], - }), - null_treatment: None, - filter: None, - over: None, - within_group: vec![], - })), - access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("b")))], - }; - - assert_eq!(stmt.projection[0], SelectItem::UnnamedExpr(expr.clone())); - assert_eq!(stmt.selection.unwrap(), Expr::IsNotNull(Box::new(expr))); - - // Compound access with quoted identifier. - all_dialects_where(|d| d.is_delimited_identifier_start('"')) - .verified_only_select("SELECT f(a).\"an id\""); - - // Composite Access in struct literal - all_dialects_where(|d| d.supports_struct_literal()).verified_stmt( - "SELECT * FROM t WHERE STRUCT(STRUCT(1 AS a, NULL AS b) AS c, NULL AS d).c.a IS NOT NULL", - ); - let support_struct = all_dialects_where(|d| d.supports_struct_literal()); - let stmt = support_struct - .verified_only_select("SELECT STRUCT(STRUCT(1 AS a, NULL AS b) AS c, NULL AS d).c.a"); - let expected = SelectItem::UnnamedExpr(Expr::CompoundFieldAccess { - root: Box::new(Expr::Struct { - values: vec![ - Expr::Named { - name: Ident::new("c"), - expr: Box::new(Expr::Struct { - values: vec![ - Expr::Named { - name: Ident::new("a"), - expr: Box::new(Expr::Value( - (Number("1".parse().unwrap(), false)).with_empty_span(), - )), - }, - Expr::Named { - name: Ident::new("b"), - expr: Box::new(Expr::Value((Value::Null).with_empty_span())), - }, - ], - fields: vec![], - }), - }, - Expr::Named { - name: Ident::new("d"), - expr: Box::new(Expr::Value((Value::Null).with_empty_span())), - }, - ], - fields: vec![], - }), - access_chain: vec![ - AccessExpr::Dot(Expr::Identifier(Ident::new("c"))), - AccessExpr::Dot(Expr::Identifier(Ident::new("a"))), - ], - }); - assert_eq!(stmt.projection[0], expected); -} - -#[test] -fn parse_create_table_with_enum_types() { - let sql = "CREATE TABLE t0 (foo ENUM8('a' = 1, 'b' = 2), bar ENUM16('a' = 1, 'b' = 2), baz ENUM('a', 'b'))"; - match all_dialects().verified_stmt(sql) { - Statement::CreateTable(CreateTable { name, columns, .. 
}) => { - assert_eq!(name.to_string(), "t0"); - assert_eq!( - vec![ - ColumnDef { - name: Ident::new("foo"), - data_type: DataType::Enum( - vec![ - EnumMember::NamedValue( - "a".to_string(), - Expr::Value( - (Number("1".parse().unwrap(), false)).with_empty_span() - ) - ), - EnumMember::NamedValue( - "b".to_string(), - Expr::Value( - (Number("2".parse().unwrap(), false)).with_empty_span() - ) - ) - ], - Some(8) - ), - options: vec![], - }, - ColumnDef { - name: Ident::new("bar"), - data_type: DataType::Enum( - vec![ - EnumMember::NamedValue( - "a".to_string(), - Expr::Value( - (Number("1".parse().unwrap(), false)).with_empty_span() - ) - ), - EnumMember::NamedValue( - "b".to_string(), - Expr::Value( - (Number("2".parse().unwrap(), false)).with_empty_span() - ) - ) - ], - Some(16) - ), - options: vec![], - }, - ColumnDef { - name: Ident::new("baz"), - data_type: DataType::Enum( - vec![ - EnumMember::Name("a".to_string()), - EnumMember::Name("b".to_string()) - ], - None - ), - options: vec![], - } - ], - columns - ); - } - _ => unreachable!(), - } - - // invalid case missing value for enum pair - assert_eq!( - all_dialects() - .parse_sql_statements("CREATE TABLE t0 (foo ENUM8('a' = 1, 'b' = ))") - .unwrap_err(), - ParserError::ParserError("Expected: a value, found: )".to_string()) - ); - - // invalid case that name is not a string - assert_eq!( - all_dialects() - .parse_sql_statements("CREATE TABLE t0 (foo ENUM8('a' = 1, 2))") - .unwrap_err(), - ParserError::ParserError("Expected: literal string, found: 2".to_string()) - ); -} - -#[test] -fn test_table_sample() { - let dialects = all_dialects_where(|d| d.supports_table_sample_before_alias()); - dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50) AS t"); - dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50 ROWS) AS t"); - dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50 PERCENT) AS t"); - - let dialects = all_dialects_where(|d| !d.supports_table_sample_before_alias()); - dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE BERNOULLI (50)"); - dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE SYSTEM (50)"); - dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE SYSTEM (50) REPEATABLE (10)"); -} - -#[test] -fn overflow() { - let expr = std::iter::repeat_n("1", 1000) - .collect::<Vec<_>>() - .join(" + "); - let sql = format!("SELECT {expr}"); - - let mut statements = Parser::parse_sql(&GenericDialect {}, sql.as_str()).unwrap(); - let statement = statements.pop().unwrap(); - assert_eq!(statement.to_string(), sql); -} -#[test] -fn parse_select_without_projection() { - let dialects = all_dialects_where(|d| d.supports_empty_projections()); - dialects.verified_stmt("SELECT FROM users"); -} - -#[test] -fn parse_update_from_before_select() { - verified_stmt("UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name WHERE t1.id = t2.id"); - verified_stmt("UPDATE t1 FROM U, (SELECT id FROM V) AS W SET a = b WHERE 1 = 1"); - - let query = - "UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name FROM (SELECT name from t2) AS t2"; - assert_eq!( - ParserError::ParserError("Expected: end of statement, found: FROM".to_string()), - parse_sql_statements(query).unwrap_err() - ); -} -#[test] -fn parse_overlaps() { - verified_stmt("SELECT (DATE '2016-01-10', DATE '2016-02-01') OVERLAPS (DATE '2016-01-20', DATE '2016-02-10')"); -} - -#[test] -fn parse_column_definition_trailing_commas() { - let dialects = all_dialects_where(|d| d.supports_column_definition_trailing_commas()); - 
dialects.one_statement_parses_to("CREATE TABLE T (x INT64,)", "CREATE TABLE T (x INT64)"); - dialects.one_statement_parses_to( - "CREATE TABLE T (x INT64, y INT64, )", - "CREATE TABLE T (x INT64, y INT64)", - ); - dialects.one_statement_parses_to( - "CREATE VIEW T (x, y, ) AS SELECT 1", - "CREATE VIEW T (x, y) AS SELECT 1", - ); - - let unsupported_dialects = all_dialects_where(|d| { - !d.supports_projection_trailing_commas() && !d.supports_trailing_commas() - }); - assert_eq!( - unsupported_dialects - .parse_sql_statements("CREATE TABLE employees (name text, age int,)") - .unwrap_err(), - ParserError::ParserError( - "Expected: column name or constraint definition, found: )".to_string() - ), - ); -} - -#[test] -fn test_trailing_commas_in_from() { - let dialects = all_dialects_where(|d| d.supports_from_trailing_commas()); - dialects.verified_only_select_with_canonical("SELECT 1, 2 FROM t,", "SELECT 1, 2 FROM t"); - - dialects - .verified_only_select_with_canonical("SELECT 1, 2 FROM t1, t2,", "SELECT 1, 2 FROM t1, t2"); - - let sql = "SELECT a, FROM b, LIMIT 1"; - let _ = dialects.parse_sql_statements(sql).unwrap(); - - let sql = "INSERT INTO a SELECT b FROM c,"; - let _ = dialects.parse_sql_statements(sql).unwrap(); - - let sql = "SELECT a FROM b, HAVING COUNT(*) > 1"; - let _ = dialects.parse_sql_statements(sql).unwrap(); - - let sql = "SELECT a FROM b, WHERE c = 1"; - let _ = dialects.parse_sql_statements(sql).unwrap(); - - // nested - let sql = "SELECT 1, 2 FROM (SELECT * FROM t,),"; - let _ = dialects.parse_sql_statements(sql).unwrap(); - - // multiple_subqueries - dialects.verified_only_select_with_canonical( - "SELECT 1, 2 FROM (SELECT * FROM t1), (SELECT * FROM t2),", - "SELECT 1, 2 FROM (SELECT * FROM t1), (SELECT * FROM t2)", - ); -} - -#[test] -#[cfg(feature = "visitor")] -fn test_visit_order() { - let sql = "SELECT CASE a WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END"; - let stmt = verified_stmt(sql); - let mut visited = vec![]; - let _ = sqlparser::ast::visit_expressions(&stmt, |expr| { - visited.push(expr.to_string()); - core::ops::ControlFlow::<()>::Continue(()) - }); - - assert_eq!( - visited, - [ - "CASE a WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END", - "a", - "1", - "2", - "3", - "4", - "5" - ] - ); -} - -#[test] -fn parse_case_statement() { - let sql = "CASE 1 WHEN 2 THEN SELECT 1; SELECT 2; ELSE SELECT 3; END CASE"; - let Statement::Case(stmt) = verified_stmt(sql) else { - unreachable!() - }; - - assert_eq!(Some(Expr::value(number("1"))), stmt.match_expr); - assert_eq!( - Some(Expr::value(number("2"))), - stmt.when_blocks[0].condition - ); - assert_eq!(2, stmt.when_blocks[0].statements().len()); - assert_eq!(1, stmt.else_block.unwrap().statements().len()); - - verified_stmt(concat!( - "CASE 1", - " WHEN a THEN", - " SELECT 1; SELECT 2; SELECT 3;", - " WHEN b THEN", - " SELECT 4; SELECT 5;", - " ELSE", - " SELECT 7; SELECT 8;", - " END CASE" - )); - verified_stmt(concat!( - "CASE 1", - " WHEN a THEN", - " SELECT 1; SELECT 2; SELECT 3;", - " WHEN b THEN", - " SELECT 4; SELECT 5;", - " END CASE" - )); - verified_stmt(concat!( - "CASE 1", - " WHEN a THEN", - " SELECT 1; SELECT 2; SELECT 3;", - " END CASE" - )); - verified_stmt(concat!( - "CASE 1", - " WHEN a THEN", - " SELECT 1; SELECT 2; SELECT 3;", - " END" - )); - - assert_eq!( - ParserError::ParserError("Expected: THEN, found: END".to_string()), - parse_sql_statements("CASE 1 WHEN a END").unwrap_err() - ); - assert_eq!( - ParserError::ParserError("Expected: WHEN, found: ELSE".to_string()), - parse_sql_statements("CASE 1 ELSE SELECT 
1; END").unwrap_err() - ); -} - -#[test] -fn test_case_statement_span() { - let sql = "CASE 1 WHEN 2 THEN SELECT 1; SELECT 2; ELSE SELECT 3; END CASE"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); - assert_eq!( - parser.parse_statement().unwrap().span(), - Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) - ); -} - -#[test] -fn parse_if_statement() { - let dialects = all_dialects_except(|d| d.is::()); - - let sql = "IF 1 THEN SELECT 1; ELSEIF 2 THEN SELECT 2; ELSE SELECT 3; END IF"; - let Statement::If(IfStatement { - if_block, - elseif_blocks, - else_block, - .. - }) = dialects.verified_stmt(sql) - else { - unreachable!() - }; - assert_eq!(Some(Expr::value(number("1"))), if_block.condition); - assert_eq!(Some(Expr::value(number("2"))), elseif_blocks[0].condition); - assert_eq!(1, else_block.unwrap().statements().len()); - - dialects.verified_stmt(concat!( - "IF 1 THEN", - " SELECT 1;", - " SELECT 2;", - " SELECT 3;", - " ELSEIF 2 THEN", - " SELECT 4;", - " SELECT 5;", - " ELSEIF 3 THEN", - " SELECT 6;", - " SELECT 7;", - " ELSE", - " SELECT 8;", - " SELECT 9;", - " END IF" - )); - dialects.verified_stmt(concat!( - "IF 1 THEN", - " SELECT 1;", - " SELECT 2;", - " ELSE", - " SELECT 3;", - " SELECT 4;", - " END IF" - )); - dialects.verified_stmt(concat!( - "IF 1 THEN", - " SELECT 1;", - " SELECT 2;", - " SELECT 3;", - " ELSEIF 2 THEN", - " SELECT 3;", - " SELECT 4;", - " END IF" - )); - dialects.verified_stmt(concat!("IF 1 THEN", " SELECT 1;", " SELECT 2;", " END IF")); - dialects.verified_stmt(concat!( - "IF (1) THEN", - " SELECT 1;", - " SELECT 2;", - " END IF" - )); - dialects.verified_stmt("IF 1 THEN END IF"); - dialects.verified_stmt("IF 1 THEN SELECT 1; ELSEIF 1 THEN END IF"); - - assert_eq!( - ParserError::ParserError("Expected: IF, found: EOF".to_string()), - dialects - .parse_sql_statements("IF 1 THEN SELECT 1; ELSEIF 1 THEN SELECT 2; END") - .unwrap_err() - ); -} - -#[test] -fn test_if_statement_span() { - let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); - assert_eq!( - parser.parse_statement().unwrap().span(), - Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) - ); -} - -#[test] -fn test_if_statement_multiline_span() { - let sql_line1 = "IF 1 = 1 THEN SELECT 1;"; - let sql_line2 = "ELSEIF 1 = 2 THEN SELECT 2;"; - let sql_line3 = "ELSE SELECT 3;"; - let sql_line4 = "END IF"; - let sql = [sql_line1, sql_line2, sql_line3, sql_line4].join("\n"); - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(&sql).unwrap(); - assert_eq!( - parser.parse_statement().unwrap().span(), - Span::new( - Location::new(1, 1), - Location::new(4, sql_line4.len() as u64 + 1) - ) - ); -} - -#[test] -fn test_conditional_statement_span() { - let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; - let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); - match parser.parse_statement().unwrap() { - Statement::If(IfStatement { - if_block, - elseif_blocks, - else_block, - .. 
- }) => { - assert_eq!( - Span::new(Location::new(1, 1), Location::new(1, 21)), - if_block.span() - ); - assert_eq!( - Span::new(Location::new(1, 23), Location::new(1, 47)), - elseif_blocks[0].span() - ); - assert_eq!( - Span::new(Location::new(1, 49), Location::new(1, 62)), - else_block.unwrap().span() - ); - } - stmt => panic!("Unexpected statement: {stmt:?}"), - } -} - -#[test] -fn parse_raise_statement() { - let sql = "RAISE USING MESSAGE = 42"; - let Statement::Raise(stmt) = verified_stmt(sql) else { - unreachable!() - }; - assert_eq!( - Some(RaiseStatementValue::UsingMessage(Expr::value(number("42")))), - stmt.value - ); - - verified_stmt("RAISE USING MESSAGE = 'error'"); - verified_stmt("RAISE myerror"); - verified_stmt("RAISE 42"); - verified_stmt("RAISE using"); - verified_stmt("RAISE"); - - assert_eq!( - ParserError::ParserError("Expected: =, found: error".to_string()), - parse_sql_statements("RAISE USING MESSAGE error").unwrap_err() - ); -} - -#[test] -fn test_lambdas() { - let dialects = all_dialects_where(|d| d.supports_lambda_functions()); - - #[rustfmt::skip] - let sql = concat!( - "SELECT array_sort(array('Hello', 'World'), ", - "(p1, p2) -> CASE WHEN p1 = p2 THEN 0 ", - "WHEN reverse(p1) < reverse(p2) THEN -1 ", - "ELSE 1 END)", - ); - pretty_assertions::assert_eq!( - SelectItem::UnnamedExpr(call( - "array_sort", - [ - call( - "array", - [ - Expr::Value( - (Value::SingleQuotedString("Hello".to_owned())).with_empty_span() - ), - Expr::Value( - (Value::SingleQuotedString("World".to_owned())).with_empty_span() - ) - ] - ), - Expr::Lambda(LambdaFunction { - params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]), - body: Box::new(Expr::Case { - case_token: AttachedToken::empty(), - end_token: AttachedToken::empty(), - operand: None, - conditions: vec![ - CaseWhen { - condition: Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("p1"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Identifier(Ident::new("p2"))) - }, - result: Expr::value(number("0")), - }, - CaseWhen { - condition: Expr::BinaryOp { - left: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p1"))] - )), - op: BinaryOperator::Lt, - right: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p2"))] - )), - }, - result: Expr::UnaryOp { - op: UnaryOperator::Minus, - expr: Box::new(Expr::value(number("1"))) - } - }, - ], - else_result: Some(Box::new(Expr::value(number("1")))), - }) - }) - ] - )), - dialects.verified_only_select(sql).projection[0] - ); - - dialects.verified_expr( - "map_zip_with(map(1, 'a', 2, 'b'), map(1, 'x', 2, 'y'), (k, v1, v2) -> concat(v1, v2))", - ); - dialects.verified_expr("transform(array(1, 2, 3), x -> x + 1)"); -} - -#[test] -fn test_select_from_first() { - let dialects = all_dialects_where(|d| d.supports_from_first_select()); - let q1 = "FROM capitals"; - let q2 = "FROM capitals SELECT *"; - - for (q, flavor, projection) in [ - (q1, SelectFlavor::FromFirstNoSelect, vec![]), - ( - q2, - SelectFlavor::FromFirst, - vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], - ), - ] { - let ast = dialects.verified_query(q); - let expected = Query { - with: None, - body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, - top: None, - projection, - exclude: None, - top_before_distinct: false, - into: None, - from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "capitals".to_string(), - quote_style: None, - span: Span::empty(), - }])), - joins: 
vec![], - }], - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - window_before_qualify: false, - qualify: None, - value_table_mode: None, - connect_by: None, - flavor, - }))), - order_by: None, - limit_clause: None, - fetch: None, - locks: vec![], - for_clause: None, - settings: None, - format_clause: None, - pipe_operators: vec![], - }; - assert_eq!(expected, ast); - assert_eq!(ast.to_string(), q); - } -} - -#[test] -fn test_geometric_unary_operators() { - // Number of points in path or polygon - let sql = "# path '((1,0),(0,1),(-1,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::UnaryOp { - op: UnaryOperator::Hash, - .. - } - )); - - // Length or circumference - let sql = "@-@ path '((0,0),(1,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::UnaryOp { - op: UnaryOperator::AtDashAt, - .. - } - )); - - // Center - let sql = "@@ circle '((0,0),10)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::UnaryOp { - op: UnaryOperator::DoubleAt, - .. - } - )); - // Is horizontal? - let sql = "?- lseg '((-1,0),(1,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::UnaryOp { - op: UnaryOperator::QuestionDash, - .. - } - )); - - // Is vertical? - let sql = "?| lseg '((-1,0),(1,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::UnaryOp { - op: UnaryOperator::QuestionPipe, - .. - } - )); -} - -#[test] -fn test_geometry_type() { - let sql = "point '1,2'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::Point), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2".to_string()), - span: Span::empty(), - }, - } - ); - - let sql = "line '1,2,3,4'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::Line), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3,4".to_string()), - span: Span::empty(), - }, - } - ); - - let sql = "path '1,2,3,4'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::GeometricPath), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3,4".to_string()), - span: Span::empty(), - }, - } - ); - let sql = "box '1,2,3,4'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::GeometricBox), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3,4".to_string()), - span: Span::empty(), - }, - } - ); - - let sql = "circle '1,2,3'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::Circle), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3".to_string()), - span: Span::empty(), - }, - } - ); - - let sql = "polygon '1,2,3,4'"; - assert_eq!( - all_dialects_where(|d| 
d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::Polygon), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3,4".to_string()), - span: Span::empty(), - }, - } - ); - let sql = "lseg '1,2,3,4'"; - assert_eq!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::TypedString { - data_type: DataType::GeometricType(GeometricTypeKind::LineSegment), - value: ValueWithSpan { - value: Value::SingleQuotedString("1,2,3,4".to_string()), - span: Span::empty(), - }, - } - ); -} -#[test] -fn test_geometric_binary_operators() { - // Translation plus - let sql = "box '((0,0),(1,1))' + point '(2.0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::Plus, - .. - } - )); - // Translation minus - let sql = "box '((0,0),(1,1))' - point '(2.0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::Minus, - .. - } - )); - - // Scaling multiply - let sql = "box '((0,0),(1,1))' * point '(2.0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::Multiply, - .. - } - )); - - // Scaling divide - let sql = "box '((0,0),(1,1))' / point '(2.0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::Divide, - .. - } - )); - - // Intersection - let sql = "'((1,-1),(-1,1))' # '((1,1),(-1,-1))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PGBitwiseXor, - .. - } - )); - - //Point of closest proximity - let sql = "point '(0,0)' ## lseg '((2,0),(0,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::DoubleHash, - .. - } - )); - - // Point of closest proximity - let sql = "box '((0,0),(1,1))' && box '((0,0),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PGOverlap, - .. - } - )); - - // Overlaps to left? - let sql = "box '((0,0),(1,1))' &< box '((0,0),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::AndLt, - .. - } - )); - - // Overlaps to right? - let sql = "box '((0,0),(3,3))' &> box '((0,0),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::AndGt, - .. - } - )); - - // Distance between - let sql = "circle '((0,0),1)' <-> circle '((5,0),1)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::LtDashGt, - .. - } - )); - - // Is left of? - let sql = "circle '((0,0),1)' << circle '((5,0),1)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PGBitwiseShiftLeft, - .. - } - )); - - // Is right of? - let sql = "circle '((5,0),1)' >> circle '((0,0),1)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PGBitwiseShiftRight, - .. 
- } - )); - - // Is below? - let sql = "circle '((0,0),1)' <^ circle '((0,5),1)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::LtCaret, - .. - } - )); - - // Intersects or overlaps - let sql = "lseg '((-1,0),(1,0))' ?# box '((-2,-2),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::QuestionHash, - .. - } - )); - - // Is horizontal? - let sql = "point '(1,0)' ?- point '(0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::QuestionDash, - .. - } - )); - - // Is perpendicular? - let sql = "lseg '((0,0),(0,1))' ?-| lseg '((0,0),(1,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::QuestionDashPipe, - .. - } - )); - - // Is vertical? - let sql = "point '(0,1)' ?| point '(0,0)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::QuestionPipe, - .. - } - )); - - // Are parallel? - let sql = "lseg '((-1,0),(1,0))' ?|| lseg '((-1,2),(1,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::QuestionDoublePipe, - .. - } - )); - - // Contained or on? - let sql = "point '(1,1)' @ circle '((0,0),2)'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::At, - .. - } - )); - - // - // Same as? - let sql = "polygon '((0,0),(1,1))' ~= polygon '((1,1),(0,0))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::TildeEq, - .. - } - )); - - // Is strictly below? - let sql = "box '((0,0),(3,3))' <<| box '((3,4),(5,5))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::LtLtPipe, - .. - } - )); - - // Is strictly above? - let sql = "box '((3,4),(5,5))' |>> box '((0,0),(3,3))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PipeGtGt, - .. - } - )); - - // Does not extend above? - let sql = "box '((0,0),(1,1))' &<| box '((0,0),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::AndLtPipe, - .. - } - )); - - // Does not extend below? - let sql = "box '((0,0),(3,3))' |&> box '((0,0),(2,2))'"; - assert!(matches!( - all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), - Expr::BinaryOp { - op: BinaryOperator::PipeAndGt, - .. 
- } - )); -} - -#[test] -fn parse_array_type_def_with_brackets() { - let dialects = all_dialects_where(|d| d.supports_array_typedef_with_brackets()); - dialects.verified_stmt("SELECT x::INT[]"); - dialects.verified_stmt("SELECT STRING_TO_ARRAY('1,2,3', ',')::INT[3]"); -} - -#[test] -fn parse_set_names() { - let dialects = all_dialects_where(|d| d.supports_set_names()); - dialects.verified_stmt("SET NAMES 'UTF8'"); - dialects.verified_stmt("SET NAMES 'utf8'"); - dialects.verified_stmt("SET NAMES UTF8 COLLATE bogus"); -} - -#[test] -fn parse_pipeline_operator() { - let dialects = all_dialects_where(|d| d.supports_pipe_operator()); - - // select pipe operator - dialects.verified_stmt("SELECT * FROM users |> SELECT id"); - dialects.verified_stmt("SELECT * FROM users |> SELECT id, name"); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> SELECT id user_id", - "SELECT * FROM users |> SELECT id AS user_id", - ); - dialects.verified_stmt("SELECT * FROM users |> SELECT id AS user_id"); - - // extend pipe operator - dialects.verified_stmt("SELECT * FROM users |> EXTEND id + 1 AS new_id"); - dialects.verified_stmt("SELECT * FROM users |> EXTEND id AS new_id, name AS new_name"); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> EXTEND id user_id", - "SELECT * FROM users |> EXTEND id AS user_id", - ); - - // set pipe operator - dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1"); - dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1, name = name + ' Doe'"); - - // drop pipe operator - dialects.verified_stmt("SELECT * FROM users |> DROP id"); - dialects.verified_stmt("SELECT * FROM users |> DROP id, name"); - - // as pipe operator - dialects.verified_stmt("SELECT * FROM users |> AS new_users"); - - // limit pipe operator - dialects.verified_stmt("SELECT * FROM users |> LIMIT 10"); - dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 OFFSET 5"); - dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> LIMIT 5"); - dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> WHERE true"); - - // where pipe operator - dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1"); - dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 AND name = 'John'"); - dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 OR name = 'John'"); - - // aggregate pipe operator full table - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*)"); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> AGGREGATE COUNT(*) total_users", - "SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users", - ); - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users"); - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*), MIN(id)"); - - // aggregate pipe operator with grouping - dialects.verified_stmt( - "SELECT * FROM users |> AGGREGATE SUM(o_totalprice) AS price, COUNT(*) AS cnt GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year", - ); - dialects.verified_stmt( - "SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year", - ); - dialects - .verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate)"); - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY a, b"); - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) GROUP BY a, b"); - dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) ASC"); - - // order by pipe operator - dialects.verified_stmt("SELECT * FROM users |> ORDER BY id ASC"); -
dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC"); - dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC, name ASC"); - - // tablesample pipe operator - dialects.verified_stmt("SELECT * FROM tbl |> TABLESAMPLE BERNOULLI (50)"); - dialects.verified_stmt("SELECT * FROM tbl |> TABLESAMPLE SYSTEM (50 PERCENT)"); - dialects.verified_stmt("SELECT * FROM tbl |> TABLESAMPLE SYSTEM (50) REPEATABLE (10)"); - - // rename pipe operator - dialects.verified_stmt("SELECT * FROM users |> RENAME old_name AS new_name"); - dialects.verified_stmt("SELECT * FROM users |> RENAME id AS user_id, name AS user_name"); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> RENAME id user_id", - "SELECT * FROM users |> RENAME id AS user_id", - ); - - // union pipe operator - dialects.verified_stmt("SELECT * FROM users |> UNION ALL (SELECT * FROM admins)"); - dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT (SELECT * FROM admins)"); - dialects.verified_stmt("SELECT * FROM users |> UNION (SELECT * FROM admins)"); - - // union pipe operator with multiple queries - dialects.verified_stmt( - "SELECT * FROM users |> UNION ALL (SELECT * FROM admins), (SELECT * FROM guests)", - ); - dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT (SELECT * FROM admins), (SELECT * FROM guests), (SELECT * FROM employees)"); - dialects.verified_stmt( - "SELECT * FROM users |> UNION (SELECT * FROM admins), (SELECT * FROM guests)", - ); - - // union pipe operator with BY NAME modifier - dialects.verified_stmt("SELECT * FROM users |> UNION BY NAME (SELECT * FROM admins)"); - dialects.verified_stmt("SELECT * FROM users |> UNION ALL BY NAME (SELECT * FROM admins)"); - dialects.verified_stmt("SELECT * FROM users |> UNION DISTINCT BY NAME (SELECT * FROM admins)"); - - // union pipe operator with BY NAME and multiple queries - dialects.verified_stmt( - "SELECT * FROM users |> UNION BY NAME (SELECT * FROM admins), (SELECT * FROM guests)", - ); - - // intersect pipe operator (BigQuery requires DISTINCT modifier for INTERSECT) - dialects.verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT (SELECT * FROM admins)"); - - // intersect pipe operator with BY NAME modifier - dialects - .verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT BY NAME (SELECT * FROM admins)"); - - // intersect pipe operator with multiple queries - dialects.verified_stmt( - "SELECT * FROM users |> INTERSECT DISTINCT (SELECT * FROM admins), (SELECT * FROM guests)", - ); - - // intersect pipe operator with BY NAME and multiple queries - dialects.verified_stmt("SELECT * FROM users |> INTERSECT DISTINCT BY NAME (SELECT * FROM admins), (SELECT * FROM guests)"); - - // except pipe operator (BigQuery requires DISTINCT modifier for EXCEPT) - dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT (SELECT * FROM admins)"); - - // except pipe operator with BY NAME modifier - dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT BY NAME (SELECT * FROM admins)"); - - // except pipe operator with multiple queries - dialects.verified_stmt( - "SELECT * FROM users |> EXCEPT DISTINCT (SELECT * FROM admins), (SELECT * FROM guests)", - ); - - // except pipe operator with BY NAME and multiple queries - dialects.verified_stmt("SELECT * FROM users |> EXCEPT DISTINCT BY NAME (SELECT * FROM admins), (SELECT * FROM guests)"); - - // call pipe operator - dialects.verified_stmt("SELECT * FROM users |> CALL my_function()"); - dialects.verified_stmt("SELECT * FROM users |> CALL process_data(5, 'test')"); - 
dialects.verified_stmt( - "SELECT * FROM users |> CALL namespace.function_name(col1, col2, 'literal')", - ); - - // call pipe operator with complex arguments - dialects.verified_stmt("SELECT * FROM users |> CALL transform_data(col1 + col2)"); - dialects.verified_stmt("SELECT * FROM users |> CALL analyze_data('param1', 100, true)"); - - // call pipe operator with aliases - dialects.verified_stmt("SELECT * FROM input_table |> CALL tvf1(arg1) AS al"); - dialects.verified_stmt("SELECT * FROM users |> CALL process_data(5) AS result_table"); - dialects.verified_stmt("SELECT * FROM users |> CALL namespace.func() AS my_alias"); - - // multiple call pipe operators in sequence - dialects.verified_stmt("SELECT * FROM input_table |> CALL tvf1(arg1) |> CALL tvf2(arg2, arg3)"); - dialects.verified_stmt( - "SELECT * FROM data |> CALL transform(col1) |> CALL validate() |> CALL process(param)", - ); - - // multiple call pipe operators with aliases - dialects.verified_stmt( - "SELECT * FROM input_table |> CALL tvf1(arg1) AS step1 |> CALL tvf2(arg2) AS step2", - ); - dialects.verified_stmt( - "SELECT * FROM data |> CALL preprocess() AS clean_data |> CALL analyze(mode) AS results", - ); - - // call pipe operators mixed with other pipe operators - dialects.verified_stmt( - "SELECT * FROM users |> CALL transform() |> WHERE status = 'active' |> CALL process(param)", - ); - dialects.verified_stmt( - "SELECT * FROM data |> CALL preprocess() AS clean |> SELECT col1, col2 |> CALL validate()", - ); - - // pivot pipe operator - dialects.verified_stmt( - "SELECT * FROM monthly_sales |> PIVOT(SUM(amount) FOR quarter IN ('Q1', 'Q2', 'Q3', 'Q4'))", - ); - dialects.verified_stmt("SELECT * FROM sales_data |> PIVOT(AVG(revenue) FOR region IN ('North', 'South', 'East', 'West'))"); - - // pivot pipe operator with multiple aggregate functions - dialects.verified_stmt("SELECT * FROM data |> PIVOT(SUM(sales) AS total_sales, COUNT(*) AS num_transactions FOR month IN ('Jan', 'Feb', 'Mar'))"); - - // pivot pipe operator with compound column names - dialects.verified_stmt("SELECT * FROM sales |> PIVOT(SUM(amount) FOR product.category IN ('Electronics', 'Clothing'))"); - - // pivot pipe operator mixed with other pipe operators - dialects.verified_stmt("SELECT * FROM sales_data |> WHERE year = 2023 |> PIVOT(SUM(revenue) FOR quarter IN ('Q1', 'Q2', 'Q3', 'Q4'))"); - - // pivot pipe operator with aliases - dialects.verified_stmt("SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) AS quarterly_sales"); - dialects.verified_stmt("SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) AS avg_by_category"); - dialects.verified_stmt("SELECT * FROM sales |> PIVOT(COUNT(*) AS transactions, SUM(amount) AS total FOR region IN ('North', 'South')) AS regional_summary"); - - // pivot pipe operator with implicit aliases (without AS keyword) - dialects.verified_query_with_canonical( - "SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) quarterly_sales", - "SELECT * FROM monthly_sales |> PIVOT(SUM(sales) FOR quarter IN ('Q1', 'Q2')) AS quarterly_sales", - ); - dialects.verified_query_with_canonical( - "SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) avg_by_category", - "SELECT * FROM data |> PIVOT(AVG(price) FOR category IN ('A', 'B', 'C')) AS avg_by_category", - ); - - // unpivot pipe operator basic usage - dialects - .verified_stmt("SELECT * FROM sales |> UNPIVOT(revenue FOR quarter IN (Q1, Q2, Q3, Q4))"); - dialects.verified_stmt("SELECT * FROM data |> 
UNPIVOT(value FOR category IN (A, B, C))"); - dialects.verified_stmt( - "SELECT * FROM metrics |> UNPIVOT(measurement FOR metric_type IN (cpu, memory, disk))", - ); - - // unpivot pipe operator with multiple columns - dialects.verified_stmt("SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (jan, feb, mar, apr, may, jun))"); - dialects.verified_stmt( - "SELECT * FROM report |> UNPIVOT(score FOR subject IN (math, science, english, history))", - ); - - // unpivot pipe operator mixed with other pipe operators - dialects.verified_stmt("SELECT * FROM sales_data |> WHERE year = 2023 |> UNPIVOT(revenue FOR quarter IN (Q1, Q2, Q3, Q4))"); - - // unpivot pipe operator with aliases - dialects.verified_stmt("SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) AS unpivoted_sales"); - dialects.verified_stmt( - "SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) AS transformed_data", - ); - dialects.verified_stmt("SELECT * FROM metrics |> UNPIVOT(measurement FOR metric_type IN (cpu, memory)) AS metric_measurements"); - - // unpivot pipe operator with implicit aliases (without AS keyword) - dialects.verified_query_with_canonical( - "SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) unpivoted_sales", - "SELECT * FROM quarterly_sales |> UNPIVOT(amount FOR period IN (Q1, Q2)) AS unpivoted_sales", - ); - dialects.verified_query_with_canonical( - "SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) transformed_data", - "SELECT * FROM data |> UNPIVOT(value FOR category IN (A, B, C)) AS transformed_data", - ); - - // many pipes - dialects.verified_stmt( - "SELECT * FROM CustomerOrders |> AGGREGATE SUM(cost) AS total_cost GROUP BY customer_id, state, item_type |> EXTEND COUNT(*) OVER (PARTITION BY customer_id) AS num_orders |> WHERE num_orders > 1 |> AGGREGATE AVG(total_cost) AS average GROUP BY state DESC, item_type ASC", - ); - - // join pipe operator - INNER JOIN - dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id"); - dialects.verified_stmt("SELECT * FROM users |> INNER JOIN orders ON users.id = orders.user_id"); - - // join pipe operator - LEFT JOIN - dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id"); - dialects.verified_stmt( - "SELECT * FROM users |> LEFT OUTER JOIN orders ON users.id = orders.user_id", - ); - - // join pipe operator - RIGHT JOIN - dialects.verified_stmt("SELECT * FROM users |> RIGHT JOIN orders ON users.id = orders.user_id"); - dialects.verified_stmt( - "SELECT * FROM users |> RIGHT OUTER JOIN orders ON users.id = orders.user_id", - ); - - // join pipe operator - FULL JOIN - dialects.verified_stmt("SELECT * FROM users |> FULL JOIN orders ON users.id = orders.user_id"); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> FULL OUTER JOIN orders ON users.id = orders.user_id", - "SELECT * FROM users |> FULL JOIN orders ON users.id = orders.user_id", - ); - - // join pipe operator - CROSS JOIN - dialects.verified_stmt("SELECT * FROM users |> CROSS JOIN orders"); - - // join pipe operator with USING - dialects.verified_query_with_canonical( - "SELECT * FROM users |> JOIN orders USING (user_id)", - "SELECT * FROM users |> JOIN orders USING(user_id)", - ); - dialects.verified_query_with_canonical( - "SELECT * FROM users |> LEFT JOIN orders USING (user_id, order_date)", - "SELECT * FROM users |> LEFT JOIN orders USING(user_id, order_date)", - ); - - // join pipe operator with alias - dialects.verified_query_with_canonical( 
- "SELECT * FROM users |> JOIN orders o ON users.id = o.user_id", - "SELECT * FROM users |> JOIN orders AS o ON users.id = o.user_id", - ); - dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders AS o ON users.id = o.user_id"); - - // join pipe operator with complex ON condition - dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id AND orders.status = 'active'"); - dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id AND orders.amount > 100"); - - // multiple join pipe operators - dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> JOIN products ON orders.product_id = products.id"); - dialects.verified_stmt("SELECT * FROM users |> LEFT JOIN orders ON users.id = orders.user_id |> RIGHT JOIN products ON orders.product_id = products.id"); - - // join pipe operator with other pipe operators - dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> WHERE orders.amount > 100"); - dialects.verified_stmt("SELECT * FROM users |> WHERE users.active = true |> LEFT JOIN orders ON users.id = orders.user_id"); - dialects.verified_stmt("SELECT * FROM users |> JOIN orders ON users.id = orders.user_id |> SELECT users.name, orders.amount"); -} - -#[test] -fn parse_pipeline_operator_negative_tests() { - let dialects = all_dialects_where(|d| d.supports_pipe_operator()); - - // Test that plain EXCEPT without DISTINCT fails - assert_eq!( - ParserError::ParserError("EXCEPT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> EXCEPT (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that EXCEPT ALL fails - assert_eq!( - ParserError::ParserError("EXCEPT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> EXCEPT ALL (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that EXCEPT BY NAME without DISTINCT fails - assert_eq!( - ParserError::ParserError("EXCEPT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> EXCEPT BY NAME (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that EXCEPT ALL BY NAME fails - assert_eq!( - ParserError::ParserError("EXCEPT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements( - "SELECT * FROM users |> EXCEPT ALL BY NAME (SELECT * FROM admins)" - ) - .unwrap_err() - ); - - // Test that plain INTERSECT without DISTINCT fails - assert_eq!( - ParserError::ParserError("INTERSECT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> INTERSECT (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that INTERSECT ALL fails - assert_eq!( - ParserError::ParserError("INTERSECT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> INTERSECT ALL (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that INTERSECT BY NAME without DISTINCT fails - assert_eq!( - ParserError::ParserError("INTERSECT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements("SELECT * FROM users |> INTERSECT BY NAME (SELECT * FROM admins)") - .unwrap_err() - ); - - // Test that INTERSECT ALL BY NAME fails - assert_eq!( - ParserError::ParserError("INTERSECT pipe operator requires DISTINCT modifier".to_string()), - dialects - .parse_sql_statements( - "SELECT * FROM 
users |> INTERSECT ALL BY NAME (SELECT * FROM admins)" - ) - .unwrap_err() - ); - - // Test that CALL without function name fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> CALL") - .is_err()); - - // Test that CALL without parentheses fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> CALL my_function") - .is_err()); - - // Test that CALL with invalid function syntax fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> CALL 123invalid") - .is_err()); - - // Test that CALL with malformed arguments fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> CALL my_function(,)") - .is_err()); - - // Test that CALL with invalid alias syntax fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> CALL my_function() AS") - .is_err()); - - // Test that PIVOT without parentheses fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> PIVOT SUM(amount) FOR month IN ('Jan')") - .is_err()); - - // Test that PIVOT without FOR keyword fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> PIVOT(SUM(amount) month IN ('Jan'))") - .is_err()); - - // Test that PIVOT without IN keyword fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> PIVOT(SUM(amount) FOR month ('Jan'))") - .is_err()); - - // Test that PIVOT with empty IN list fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> PIVOT(SUM(amount) FOR month IN ())") - .is_err()); - - // Test that PIVOT with invalid alias syntax fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> PIVOT(SUM(amount) FOR month IN ('Jan')) AS") - .is_err()); - - // Test UNPIVOT negative cases - - // Test that UNPIVOT without parentheses fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT value FOR name IN col1, col2") - .is_err()); - - // Test that UNPIVOT without FOR keyword fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value name IN (col1, col2))") - .is_err()); - - // Test that UNPIVOT without IN keyword fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value FOR name (col1, col2))") - .is_err()); - - // Test that UNPIVOT with missing value column fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(FOR name IN (col1, col2))") - .is_err()); - - // Test that UNPIVOT with missing name column fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value FOR IN (col1, col2))") - .is_err()); - - // Test that UNPIVOT with empty IN list fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value FOR name IN ())") - .is_err()); - - // Test that UNPIVOT with invalid alias syntax fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value FOR name IN (col1, col2)) AS") - .is_err()); - - // Test that UNPIVOT with missing closing parenthesis fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> UNPIVOT(value FOR name IN (col1, col2)") - .is_err()); - - // Test that JOIN without table name fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> JOIN ON users.id = orders.user_id") - .is_err()); - - // Test that CROSS JOIN with ON condition fails - assert!(dialects - .parse_sql_statements( - "SELECT * FROM users |> CROSS JOIN orders ON users.id = orders.user_id" - ) - .is_err()); - - // Test that CROSS JOIN with USING condition fails - assert!(dialects - 
.parse_sql_statements("SELECT * FROM users |> CROSS JOIN orders USING (user_id)") - .is_err()); - - // Test that JOIN with empty USING list fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> JOIN orders USING ()") - .is_err()); - - // Test that JOIN with malformed ON condition fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> JOIN orders ON") - .is_err()); - - // Test that JOIN with invalid USING syntax fails - assert!(dialects - .parse_sql_statements("SELECT * FROM users |> JOIN orders USING user_id") - .is_err()); -} - -#[test] -fn parse_multiple_set_statements() -> Result<(), ParserError> { - let dialects = all_dialects_where(|d| d.supports_comma_separated_set_assignments()); - let stmt = dialects.verified_stmt("SET @a = 1, b = 2"); - - match stmt { - Statement::Set(Set::MultipleAssignments { assignments }) => { - assert_eq!( - assignments, - vec![ - SetAssignment { - scope: None, - name: ObjectName::from(vec!["@a".into()]), - value: Expr::value(number("1")) - }, - SetAssignment { - scope: None, - name: ObjectName::from(vec!["b".into()]), - value: Expr::value(number("2")) - } - ] - ); - } - _ => panic!("Expected SetVariable with 2 variables and 2 values"), - }; - - let stmt = dialects.verified_stmt("SET GLOBAL @a = 1, SESSION b = 2, LOCAL c = 3, d = 4"); - - match stmt { - Statement::Set(Set::MultipleAssignments { assignments }) => { - assert_eq!( - assignments, - vec![ - SetAssignment { - scope: Some(ContextModifier::Global), - name: ObjectName::from(vec!["@a".into()]), - value: Expr::value(number("1")) - }, - SetAssignment { - scope: Some(ContextModifier::Session), - name: ObjectName::from(vec!["b".into()]), - value: Expr::value(number("2")) - }, - SetAssignment { - scope: Some(ContextModifier::Local), - name: ObjectName::from(vec!["c".into()]), - value: Expr::value(number("3")) - }, - SetAssignment { - scope: None, - name: ObjectName::from(vec!["d".into()]), - value: Expr::value(number("4")) - } - ] - ); - } - _ => panic!("Expected MultipleAssignments with 4 scoped variables and 4 values"), - }; - - Ok(()) -} - -#[test] -fn parse_set_time_zone_alias() { - match all_dialects().verified_stmt("SET TIME ZONE 'UTC'") { - Statement::Set(Set::SetTimeZone { local, value }) => { - assert!(!local); - assert_eq!( - value, - Expr::Value((Value::SingleQuotedString("UTC".into())).with_empty_span()) - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_return() { - let stmt = all_dialects().verified_stmt("RETURN"); - assert_eq!(stmt, Statement::Return(ReturnStatement { value: None })); - - let _ = all_dialects().verified_stmt("RETURN 1"); -} - -#[test] -fn parse_subquery_limit() { - let _ = all_dialects().verified_stmt("SELECT t1_id, t1_name FROM t1 WHERE t1_id IN (SELECT t2_id FROM t2 WHERE t1_name = t2_name LIMIT 10)"); -} - -#[test] -fn test_open() { - let open_cursor = "OPEN Employee_Cursor"; - let stmt = all_dialects().verified_stmt(open_cursor); - assert_eq!( - stmt, - Statement::Open(OpenStatement { - cursor_name: Ident::new("Employee_Cursor"), - }) - ); -} - -#[test] -fn parse_truncate_only() { - let truncate = all_dialects().verified_stmt("TRUNCATE TABLE employee, ONLY dept"); - - let table_names = vec![ - TruncateTableTarget { - name: ObjectName::from(vec![Ident::new("employee")]), - only: false, - }, - TruncateTableTarget { - name: ObjectName::from(vec![Ident::new("dept")]), - only: true, - }, - ]; - - assert_eq!( - Statement::Truncate { - table_names, - partitions: None, - table: true, - identity: None, - cascade: None, - on_cluster: 
None, - }, - truncate - ); -} - -#[test] -fn check_enforced() { - all_dialects().verified_stmt( - "CREATE TABLE t (a INT, b INT, c INT, CHECK (a > 0) NOT ENFORCED, CHECK (b > 0) ENFORCED, CHECK (c > 0))", - ); -} - -#[test] -fn join_precedence() { - all_dialects_except(|d| !d.supports_left_associative_joins_without_parens()) - .verified_query_with_canonical( - "SELECT * - FROM t1 - NATURAL JOIN t5 - INNER JOIN t0 ON (t0.v1 + t5.v0) > 0 - WHERE t0.v1 = t1.v0", - // canonical string without parentheses - "SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0 WHERE t0.v1 = t1.v0", - ); - all_dialects_except(|d| d.supports_left_associative_joins_without_parens()).verified_query_with_canonical( - "SELECT * - FROM t1 - NATURAL JOIN t5 - INNER JOIN t0 ON (t0.v1 + t5.v0) > 0 - WHERE t0.v1 = t1.v0", - // canonical string with parentheses - "SELECT * FROM t1 NATURAL JOIN (t5 INNER JOIN t0 ON (t0.v1 + t5.v0) > 0) WHERE t0.v1 = t1.v0", - ); -} - -#[test] -fn parse_create_procedure_with_language() { - let sql = r#"CREATE PROCEDURE test_proc LANGUAGE sql AS BEGIN SELECT 1; END"#; - match verified_stmt(sql) { - Statement::CreateProcedure { - or_alter, - name, - params, - language, - .. - } => { - assert_eq!(or_alter, false); - assert_eq!(name.to_string(), "test_proc"); - assert_eq!(params, Some(vec![])); - assert_eq!( - language, - Some(Ident { - value: "sql".into(), - quote_style: None, - span: Span { - start: Location::empty(), - end: Location::empty() - } - }) - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_procedure_with_parameter_modes() { - let sql = r#"CREATE PROCEDURE test_proc (IN a INTEGER, OUT b TEXT, INOUT c TIMESTAMP, d BOOL) AS BEGIN SELECT 1; END"#; - match verified_stmt(sql) { - Statement::CreateProcedure { - or_alter, - name, - params, - .. - } => { - assert_eq!(or_alter, false); - assert_eq!(name.to_string(), "test_proc"); - let fake_span = Span { - start: Location { line: 0, column: 0 }, - end: Location { line: 0, column: 0 }, - }; - assert_eq!( - params, - Some(vec![ - ProcedureParam { - name: Ident { - value: "a".into(), - quote_style: None, - span: fake_span, - }, - data_type: DataType::Integer(None), - mode: Some(ArgMode::In) - }, - ProcedureParam { - name: Ident { - value: "b".into(), - quote_style: None, - span: fake_span, - }, - data_type: DataType::Text, - mode: Some(ArgMode::Out) - }, - ProcedureParam { - name: Ident { - value: "c".into(), - quote_style: None, - span: fake_span, - }, - data_type: DataType::Timestamp(None, TimezoneInfo::None), - mode: Some(ArgMode::InOut) - }, - ProcedureParam { - name: Ident { - value: "d".into(), - quote_style: None, - span: fake_span, - }, - data_type: DataType::Bool, - mode: None - }, - ]) - ); - } - _ => unreachable!(), - } -} - -#[test] -fn test_select_exclude() { - let dialects = all_dialects_where(|d| d.supports_select_wildcard_exclude()); - match &dialects - .verified_only_select("SELECT * EXCLUDE c1 FROM test") - .projection[0] - { - SelectItem::Wildcard(WildcardAdditionalOptions { opt_exclude, .. }) => { - assert_eq!( - *opt_exclude, - Some(ExcludeSelectItem::Single(Ident::new("c1"))) - ); - } - _ => unreachable!(), - } - match &dialects - .verified_only_select("SELECT * EXCLUDE (c1, c2) FROM test") - .projection[0] - { - SelectItem::Wildcard(WildcardAdditionalOptions { opt_exclude, .. 
}) => { - assert_eq!( - *opt_exclude, - Some(ExcludeSelectItem::Multiple(vec![ - Ident::new("c1"), - Ident::new("c2") - ])) - ); - } - _ => unreachable!(), - } - let select = dialects.verified_only_select("SELECT * EXCLUDE c1, c2 FROM test"); - match &select.projection[0] { - SelectItem::Wildcard(WildcardAdditionalOptions { opt_exclude, .. }) => { - assert_eq!( - *opt_exclude, - Some(ExcludeSelectItem::Single(Ident::new("c1"))) - ); - } - _ => unreachable!(), - } - match &select.projection[1] { - SelectItem::UnnamedExpr(Expr::Identifier(ident)) => { - assert_eq!(*ident, Ident::new("c2")); - } - _ => unreachable!(), - } - - let dialects = all_dialects_where(|d| d.supports_select_exclude()); - let select = dialects.verified_only_select("SELECT *, c1 EXCLUDE c1 FROM test"); - match &select.projection[0] { - SelectItem::Wildcard(additional_options) => { - assert_eq!(*additional_options, WildcardAdditionalOptions::default()); - } - _ => unreachable!(), - } - assert_eq!( - select.exclude, - Some(ExcludeSelectItem::Single(Ident::new("c1"))) - ); - - let dialects = all_dialects_where(|d| { - d.supports_select_wildcard_exclude() && !d.supports_select_exclude() - }); - let select = dialects.verified_only_select("SELECT * EXCLUDE c1 FROM test"); - match &select.projection[0] { - SelectItem::Wildcard(WildcardAdditionalOptions { opt_exclude, .. }) => { - assert_eq!( - *opt_exclude, - Some(ExcludeSelectItem::Single(Ident::new("c1"))) - ); - } - _ => unreachable!(), - } - - // Dialects that only support the wildcard form and do not accept EXCLUDE as an implicit alias - // will fail when encountering the `c2` ident - let dialects = all_dialects_where(|d| { - d.supports_select_wildcard_exclude() - && !d.supports_select_exclude() - && d.is_column_alias(&Keyword::EXCLUDE, &mut Parser::new(d)) - }); - assert_eq!( - dialects - .parse_sql_statements("SELECT *, c1 EXCLUDE c2 FROM test") - .err() - .unwrap(), - ParserError::ParserError("Expected: end of statement, found: c2".to_string()) - ); - - // Dialects that only support the wildcard form and accept EXCLUDE as an implicit alias - // will fail when encountering the `EXCLUDE` keyword - let dialects = all_dialects_where(|d| { - d.supports_select_wildcard_exclude() - && !d.supports_select_exclude() - && !d.is_column_alias(&Keyword::EXCLUDE, &mut Parser::new(d)) - }); - assert_eq!( - dialects - .parse_sql_statements("SELECT *, c1 EXCLUDE c2 FROM test") - .err() - .unwrap(), - ParserError::ParserError("Expected: end of statement, found: EXCLUDE".to_string()) - ); -} - -#[test] -fn test_no_semicolon_required_between_statements() { - let sql = r#" -SELECT * FROM tbl1 -SELECT * FROM tbl2 - "#; - - let dialects = all_dialects_with_options(ParserOptions { - trailing_commas: false, - unescape: true, - require_semicolon_stmt_delimiter: false, - }); - let stmts = dialects.parse_sql_statements(sql).unwrap(); - assert_eq!(stmts.len(), 2); - assert!(stmts.iter().all(|s| matches!(s, Statement::Query { ..
}))); -} diff --git a/tests/sqlparser_custom_dialect.rs b/tests/sqlparser_custom_dialect.rs index cee604ac..e9ca82ab 100644 --- a/tests/sqlparser_custom_dialect.rs +++ b/tests/sqlparser_custom_dialect.rs @@ -41,7 +41,7 @@ fn custom_prefix_parser() -> Result<(), ParserError> { fn parse_prefix(&self, parser: &mut Parser) -> Option> { if parser.consume_token(&Token::Number("1".to_string(), false)) { - Some(Ok(Expr::Value(Value::Null.with_empty_span()))) + Some(Ok(Expr::Value(Value::Null))) } else { None } @@ -115,11 +115,7 @@ fn custom_statement_parser() -> Result<(), ParserError> { for _ in 0..3 { let _ = parser.next_token(); } - Some(Ok(Statement::Commit { - chain: false, - end: false, - modifier: None, - })) + Some(Ok(Statement::Commit { chain: false })) } else { None } diff --git a/tests/sqlparser_databricks.rs b/tests/sqlparser_databricks.rs index a27e0699..7b917bd0 100644 --- a/tests/sqlparser_databricks.rs +++ b/tests/sqlparser_databricks.rs @@ -15,11 +15,9 @@ // specific language governing permissions and limitations // under the License. -use sqlparser::ast::helpers::attached_token::AttachedToken; use sqlparser::ast::*; use sqlparser::dialect::{DatabricksDialect, GenericDialect}; use sqlparser::parser::ParserError; -use sqlparser::tokenizer::Span; use test_utils::*; #[macro_use] @@ -49,9 +47,7 @@ fn test_databricks_identifiers() { databricks() .verified_only_select(r#"SELECT "Ä""#) .projection[0], - SelectItem::UnnamedExpr(Expr::Value( - (Value::DoubleQuotedString("Ä".to_owned())).with_empty_span() - )) + SelectItem::UnnamedExpr(Expr::Value(Value::DoubleQuotedString("Ä".to_owned()))) ); } @@ -66,9 +62,9 @@ fn test_databricks_exists() { call( "array", [ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")) + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")) ] ), Expr::Lambda(LambdaFunction { @@ -103,44 +99,40 @@ fn test_databricks_lambdas() { call( "array", [ - Expr::value(Value::SingleQuotedString("Hello".to_owned())), - Expr::value(Value::SingleQuotedString("World".to_owned())) + Expr::Value(Value::SingleQuotedString("Hello".to_owned())), + Expr::Value(Value::SingleQuotedString("World".to_owned())) ] ), Expr::Lambda(LambdaFunction { params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]), body: Box::new(Expr::Case { - case_token: AttachedToken::empty(), - end_token: AttachedToken::empty(), operand: None, conditions: vec![ - CaseWhen { - condition: Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("p1"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Identifier(Ident::new("p2"))) - }, - result: Expr::value(number("0")) - }, - CaseWhen { - condition: Expr::BinaryOp { - left: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p1"))] - )), - op: BinaryOperator::Lt, - right: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p2"))] - )), - }, - result: Expr::UnaryOp { - op: UnaryOperator::Minus, - expr: Box::new(Expr::value(number("1"))) - } + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("p1"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Identifier(Ident::new("p2"))) }, + Expr::BinaryOp { + left: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p1"))] + )), + op: BinaryOperator::Lt, + right: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p2"))] + )) + } ], - else_result: Some(Box::new(Expr::value(number("1")))) + results: vec![ + Expr::Value(number("0")), + Expr::UnaryOp { + op: UnaryOperator::Minus, + expr: 
Box::new(Expr::Value(number("1"))) + } + ], + else_result: Some(Box::new(Expr::Value(number("1")))) }) }) ] @@ -160,12 +152,12 @@ fn test_values_clause() { explicit_row: false, rows: vec![ vec![ - Expr::Value((Value::DoubleQuotedString("one".to_owned())).with_empty_span()), - Expr::value(number("1")), + Expr::Value(Value::DoubleQuotedString("one".to_owned())), + Expr::Value(number("1")), ], vec![ - Expr::Value((Value::SingleQuotedString("two".to_owned())).with_empty_span()), - Expr::value(number("2")), + Expr::Value(Value::SingleQuotedString("two".to_owned())), + Expr::Value(number("2")), ], ], }; @@ -193,9 +185,15 @@ fn test_values_clause() { "SELECT * FROM values", )); assert_eq!( - Some(&table_from_name(ObjectName::from(vec![Ident::new( - "values" - )]))), + Some(&TableFactor::Table { + name: ObjectName(vec![Ident::new("values")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }), query .body .as_select() @@ -214,16 +212,16 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - databricks().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + databricks().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for "e in "e_styles { // Test single identifier with different type of quotes assert_eq!( - databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -234,22 +232,22 @@ fn parse_use() { for "e in "e_styles { // Test single identifier with keyword and different type of quotes assert_eq!( - databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")), - Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote( + databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)), + Statement::Use(Use::Catalog(ObjectName(vec![Ident::with_quote( quote, "my_catalog".to_string(), )]))) ); assert_eq!( - databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")), - Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote( + databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)), + Statement::Use(Use::Database(ObjectName(vec![Ident::with_quote( quote, "my_database".to_string(), )]))) ); assert_eq!( - databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")), - Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote( + databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)), + Statement::Use(Use::Schema(ObjectName(vec![Ident::with_quote( quote, "my_schema".to_string(), )]))) @@ -259,19 +257,15 @@ fn parse_use() { // Test single identifier with keyword and no quotes assert_eq!( databricks().verified_stmt("USE CATALOG my_catalog"), - Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::new( - "my_catalog" - )]))) + Statement::Use(Use::Catalog(ObjectName(vec![Ident::new("my_catalog")]))) ); assert_eq!( databricks().verified_stmt("USE DATABASE my_schema"), - Statement::Use(Use::Database(ObjectName::from(vec![Ident::new( - "my_schema" - )]))) + Statement::Use(Use::Database(ObjectName(vec![Ident::new("my_schema")]))) ); 
assert_eq!( databricks().verified_stmt("USE SCHEMA my_schema"), - Statement::Use(Use::Schema(ObjectName::from(vec![Ident::new("my_schema")]))) + Statement::Use(Use::Schema(ObjectName(vec![Ident::new("my_schema")]))) ); // Test invalid syntax - missing identifier @@ -283,84 +277,3 @@ fn parse_use() { ); } } - -#[test] -fn parse_databricks_struct_function() { - assert_eq!( - databricks_and_generic() - .verified_only_select("SELECT STRUCT(1, 'foo')") - .projection[0], - SelectItem::UnnamedExpr(Expr::Struct { - values: vec![ - Expr::value(number("1")), - Expr::Value((Value::SingleQuotedString("foo".to_string())).with_empty_span()) - ], - fields: vec![] - }) - ); - assert_eq!( - databricks_and_generic() - .verified_only_select("SELECT STRUCT(1 AS one, 'foo' AS foo, false)") - .projection[0], - SelectItem::UnnamedExpr(Expr::Struct { - values: vec![ - Expr::Named { - expr: Expr::value(number("1")).into(), - name: Ident::new("one") - }, - Expr::Named { - expr: Expr::Value( - (Value::SingleQuotedString("foo".to_string())).with_empty_span() - ) - .into(), - name: Ident::new("foo") - }, - Expr::Value((Value::Boolean(false)).with_empty_span()) - ], - fields: vec![] - }) - ); -} - -#[test] -fn data_type_timestamp_ntz() { - // Literal - assert_eq!( - databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"), - Expr::TypedString { - data_type: DataType::TimestampNtz, - value: ValueWithSpan { - value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()), - span: Span::empty(), - } - } - ); - - // Cast - assert_eq!( - databricks().verified_expr("(created_at)::TIMESTAMP_NTZ"), - Expr::Cast { - kind: CastKind::DoubleColon, - expr: Box::new(Expr::Nested(Box::new(Expr::Identifier( - "created_at".into() - )))), - data_type: DataType::TimestampNtz, - format: None - } - ); - - // Column definition - match databricks().verified_stmt("CREATE TABLE foo (x TIMESTAMP_NTZ)") { - Statement::CreateTable(CreateTable { columns, .. 
}) => { - assert_eq!( - columns, - vec![ColumnDef { - name: "x".into(), - data_type: DataType::TimestampNtz, - options: vec![], - }] - ); - } - s => panic!("Unexpected statement: {s:?}"), - } -} diff --git a/tests/sqlparser_duckdb.rs b/tests/sqlparser_duckdb.rs index fe14b7ba..d68f3771 100644 --- a/tests/sqlparser_duckdb.rs +++ b/tests/sqlparser_duckdb.rs @@ -18,13 +18,10 @@ #[macro_use] mod test_utils; -use helpers::attached_token::AttachedToken; -use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; use sqlparser::dialect::{DuckDbDialect, GenericDialect}; -use sqlparser::parser::ParserError; fn duckdb() -> TestedDialects { TestedDialects::new(vec![Box::new(DuckDbDialect {})]) @@ -45,12 +42,10 @@ fn test_struct() { StructField { field_name: Some(Ident::new("v")), field_type: DataType::Varchar(None), - options: None, }, StructField { field_name: Some(Ident::new("i")), field_type: DataType::Integer(None), - options: None, }, ], StructBracketKind::Parentheses, @@ -63,6 +58,7 @@ fn test_struct() { vec![ColumnDef { name: "s".into(), data_type: struct_type1.clone(), + collation: None, options: vec![], }] ); @@ -77,6 +73,7 @@ fn test_struct() { Box::new(struct_type1), None )), + collation: None, options: vec![], }] ); @@ -87,7 +84,6 @@ fn test_struct() { StructField { field_name: Some(Ident::new("v")), field_type: DataType::Varchar(None), - options: None, }, StructField { field_name: Some(Ident::new("s")), @@ -96,17 +92,14 @@ fn test_struct() { StructField { field_name: Some(Ident::new("a1")), field_type: DataType::Integer(None), - options: None, }, StructField { field_name: Some(Ident::new("a2")), field_type: DataType::Varchar(None), - options: None, }, ], StructBracketKind::Parentheses, ), - options: None, }, ], StructBracketKind::Parentheses, @@ -125,6 +118,7 @@ fn test_struct() { Box::new(struct_type2), None )), + collation: None, options: vec![], }] ); @@ -164,7 +158,7 @@ fn test_select_wildcard_with_exclude() { let select = duckdb().verified_only_select("SELECT name.* EXCLUDE department_id FROM employee_table"); let expected = SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), + ObjectName(vec![Ident::new("name")]), WildcardAdditionalOptions { opt_exclude: Some(ExcludeSelectItem::Single(Ident::new("department_id"))), ..Default::default() @@ -195,7 +189,7 @@ fn test_create_macro() { let expected = Statement::CreateMacro { or_replace: false, temporary: false, - name: ObjectName::from(vec![Ident::new("schema"), Ident::new("add")]), + name: ObjectName(vec![Ident::new("schema"), Ident::new("add")]), args: Some(vec![MacroArg::new("a"), MacroArg::new("b")]), definition: MacroDefinition::Expr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("a"))), @@ -212,12 +206,12 @@ fn test_create_macro_default_args() { let expected = Statement::CreateMacro { or_replace: false, temporary: false, - name: ObjectName::from(vec![Ident::new("add_default")]), + name: ObjectName(vec![Ident::new("add_default")]), args: Some(vec![ MacroArg::new("a"), MacroArg { name: Ident::new("b"), - default_expr: Some(Expr::value(number("5"))), + default_expr: Some(Expr::Value(number("5"))), }, ]), definition: MacroDefinition::Expr(Expr::BinaryOp { @@ -240,7 +234,7 @@ fn test_create_table_macro() { let expected = Statement::CreateMacro { or_replace: true, temporary: true, - name: ObjectName::from(vec![Ident::new("dynamic_table")]), + name: ObjectName(vec![Ident::new("dynamic_table")]), args: Some(vec![ MacroArg::new("col1_value"), 
MacroArg::new("col2_value"), @@ -265,19 +259,30 @@ fn test_select_union_by_name() { op: SetOperator::Union, set_quantifier: *expected_quantifier, left: Box::::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], - exclude: None, top_before_distinct: false, + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { + opt_ilike: None, + opt_exclude: None, + opt_except: None, + opt_rename: None, + opt_replace: None, + })], into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "capitals".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "capitals".to_string(), + quote_style: None, + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -293,22 +298,32 @@ fn test_select_union_by_name() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), right: Box::::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], - exclude: None, top_before_distinct: false, + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { + opt_ilike: None, + opt_exclude: None, + opt_except: None, + opt_rename: None, + opt_replace: None, + })], into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "weather".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "weather".to_string(), + quote_style: None, + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }], lateral_views: vec![], @@ -324,7 +339,6 @@ fn test_select_union_by_name() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), }); assert_eq!(ast.body, expected); @@ -339,8 +353,7 @@ fn test_duckdb_install() { Statement::Install { extension_name: Ident { value: "tpch".to_string(), - quote_style: None, - span: Span::empty() + quote_style: None } } ); @@ -353,40 +366,13 @@ fn test_duckdb_load_extension() { Statement::Load { extension_name: Ident { value: "my_extension".to_string(), - quote_style: None, - span: Span::empty() + quote_style: None } }, stmt ); } -#[test] -fn test_duckdb_specific_int_types() { - let duckdb_dtypes = vec![ - ("UTINYINT", DataType::UTinyInt), - ("USMALLINT", DataType::USmallInt), - ("UBIGINT", DataType::UBigInt), - ("UHUGEINT", DataType::UHugeInt), - ("HUGEINT", DataType::HugeInt), - ]; - for (dtype_string, data_type) in duckdb_dtypes { - let sql = format!("SELECT 123::{dtype_string}"); - let select = duckdb().verified_only_select(&sql); - assert_eq!( - &Expr::Cast { - kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - Value::Number("123".parse().unwrap(), false).with_empty_span() - )), - data_type: data_type.clone(), - format: None, - }, - expr_from_projection(&select.projection[0]) - ); - } -} - #[test] fn test_duckdb_struct_literal() { //struct literal syntax https://duckdb.org/docs/sql/data_types/struct#creating-structs @@ -398,15 +384,15 @@ fn test_duckdb_struct_literal() { 
&Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::value(number("1"))), + value: Box::new(Expr::Value(number("1"))), }, DictionaryField { key: Ident::with_quote('\'', "b"), - value: Box::new(Expr::value(number("2"))), + value: Box::new(Expr::Value(number("2"))), }, DictionaryField { key: Ident::with_quote('\'', "c"), - value: Box::new(Expr::value(number("3"))), + value: Box::new(Expr::Value(number("3"))), }, ],), expr_from_projection(&select.projection[0]) @@ -416,9 +402,7 @@ fn test_duckdb_struct_literal() { &Expr::Array(Array { elem: vec![Expr::Dictionary(vec![DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::Value( - (Value::SingleQuotedString("abc".to_string())).with_empty_span() - )), + value: Box::new(Expr::Value(Value::SingleQuotedString("abc".to_string()))), },],)], named: false }), @@ -428,7 +412,7 @@ fn test_duckdb_struct_literal() { &Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::value(number("1"))), + value: Box::new(Expr::Value(number("1"))), }, DictionaryField { key: Ident::with_quote('\'', "b"), @@ -447,14 +431,11 @@ fn test_duckdb_struct_literal() { &Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Expr::value(number("1")).into(), + value: Expr::Value(number("1")).into(), }, DictionaryField { key: Ident::with_quote('\'', "b"), - value: Expr::Value( - (Value::SingleQuotedString("abc".to_string())).with_empty_span() - ) - .into(), + value: Expr::Value(Value::SingleQuotedString("abc".to_string())).into(), }, ],), expr_from_projection(&select.projection[3]) @@ -471,7 +452,7 @@ fn test_duckdb_struct_literal() { key: Ident::with_quote('\'', "a"), value: Expr::Dictionary(vec![DictionaryField { key: Ident::with_quote('\'', "aa"), - value: Expr::value(number("1")).into(), + value: Expr::Value(number("1")).into(), }],) .into(), }],), @@ -626,24 +607,23 @@ fn test_duckdb_named_argument_function_with_assignment_operator() { let select = duckdb_and_generic().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("FUN")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("FUN")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("1".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "1".to_owned() + ))), operator: FunctionArgOperator::Assignment }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("2".to_owned())).with_empty_span() - )), + arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( + "2".to_owned() + ))), operator: FunctionArgOperator::Assignment }, ], @@ -669,18 +649,18 @@ fn test_array_index() { _ => panic!("Expected an expression with alias"), }; assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Array(Array { + &Expr::Subscript { + expr: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value((Value::SingleQuotedString("a".to_owned())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("b".to_owned())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("c".to_owned())).with_empty_span()) + Expr::Value(Value::SingleQuotedString("a".to_owned())), + 
Expr::Value(Value::SingleQuotedString("b".to_owned())), + Expr::Value(Value::SingleQuotedString("c".to_owned())) ], named: false })), - access_chain: vec![AccessExpr::Subscript(Subscript::Index { - index: Expr::value(number("3")) - })] + subscript: Box::new(Subscript::Index { + index: Expr::Value(number("3")) + }) }, expr ); @@ -699,8 +679,7 @@ fn test_duckdb_union_datatype() { if_not_exists: Default::default(), transient: Default::default(), volatile: Default::default(), - iceberg: Default::default(), - name: ObjectName::from(vec!["tbl1".into()]), + name: ObjectName(vec!["tbl1".into()]), columns: vec![ ColumnDef { name: "one".into(), @@ -708,6 +687,7 @@ fn test_duckdb_union_datatype() { field_name: "a".into(), field_type: DataType::Int(None) }]), + collation: Default::default(), options: Default::default() }, ColumnDef { @@ -722,6 +702,7 @@ fn test_duckdb_union_datatype() { field_type: DataType::Int(None) } ]), + collation: Default::default(), options: Default::default() }, ColumnDef { @@ -733,6 +714,7 @@ fn test_duckdb_union_datatype() { field_type: DataType::Int(None) }]) }]), + collation: Default::default(), options: Default::default() } ], @@ -744,13 +726,19 @@ fn test_duckdb_union_datatype() { storage: Default::default(), location: Default::default() }), + table_properties: Default::default(), + with_options: Default::default(), file_format: Default::default(), location: Default::default(), query: Default::default(), without_rowid: Default::default(), like: Default::default(), clone: Default::default(), + engine: Default::default(), comment: Default::default(), + auto_increment_offset: Default::default(), + default_charset: Default::default(), + collation: Default::default(), on_commit: Default::default(), on_cluster: Default::default(), primary_key: Default::default(), @@ -758,7 +746,7 @@ fn test_duckdb_union_datatype() { partition_by: Default::default(), cluster_by: Default::default(), clustered_by: Default::default(), - inherits: Default::default(), + options: Default::default(), strict: Default::default(), copy_grants: Default::default(), enable_schema_evolution: Default::default(), @@ -768,13 +756,7 @@ fn test_duckdb_union_datatype() { default_ddl_collation: Default::default(), with_aggregation_policy: Default::default(), with_row_access_policy: Default::default(), - with_tags: Default::default(), - base_location: Default::default(), - external_volume: Default::default(), - catalog: Default::default(), - catalog_sync: Default::default(), - storage_serialization_policy: Default::default(), - table_options: CreateTableOptions::None + with_tags: Default::default() }), stmt ); @@ -795,16 +777,16 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - duckdb().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + duckdb().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for "e in "e_styles { // Test single identifier with different type of quotes assert_eq!( - duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -815,10 +797,8 @@ fn parse_use() { for "e in "e_styles { // Test double identifier with different type of quotes 
assert_eq!( - duckdb().verified_stmt(&format!( - "USE {quote}CATALOG{quote}.{quote}my_schema{quote}" - )), - Statement::Use(Use::Object(ObjectName::from(vec![ + duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)), + Statement::Use(Use::Object(ObjectName(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) @@ -827,38 +807,9 @@ fn parse_use() { // Test double identifier without quotes assert_eq!( duckdb().verified_stmt("USE mydb.my_schema"), - Statement::Use(Use::Object(ObjectName::from(vec![ + Statement::Use(Use::Object(ObjectName(vec![ Ident::new("mydb"), Ident::new("my_schema") ]))) ); } - -#[test] -fn test_duckdb_trim() { - let real_sql = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#; - assert_eq!(duckdb().verified_stmt(real_sql).to_string(), real_sql); - - let sql_only_select = "SELECT TRIM('xyz', 'a')"; - let select = duckdb().verified_only_select(sql_only_select); - assert_eq!( - &Expr::Trim { - expr: Box::new(Expr::Value( - Value::SingleQuotedString("xyz".to_owned()).with_empty_span() - )), - trim_where: None, - trim_what: None, - trim_characters: Some(vec![Expr::Value( - Value::SingleQuotedString("a".to_owned()).with_empty_span() - )]), - }, - expr_from_projection(only(&select.projection)) - ); - - // missing comma separation - let error_sql = "SELECT TRIM('xyz' 'a')"; - assert_eq!( - ParserError::ParserError("Expected: ), found: 'a'".to_owned()), - duckdb().parse_sql_statements(error_sql).unwrap_err() - ); -} diff --git a/tests/sqlparser_hive.rs b/tests/sqlparser_hive.rs index 56a72ec8..10bd374c 100644 --- a/tests/sqlparser_hive.rs +++ b/tests/sqlparser_hive.rs @@ -21,11 +21,11 @@ //! is also tested (on the inputs it can handle). use sqlparser::ast::{ - ClusteredBy, CommentDef, CreateFunction, CreateFunctionBody, CreateFunctionUsing, CreateTable, - Expr, Function, FunctionArgumentList, FunctionArguments, Ident, ObjectName, OrderByExpr, - OrderByOptions, SelectItem, Set, Statement, TableFactor, UnaryOperator, Use, Value, + ClusteredBy, CommentDef, CreateFunctionBody, CreateFunctionUsing, CreateTable, Expr, Function, + FunctionArgumentList, FunctionArguments, Ident, ObjectName, OneOrManyWithParens, OrderByExpr, + SelectItem, Statement, TableFactor, UnaryOperator, Use, Value, }; -use sqlparser::dialect::{AnsiDialect, GenericDialect, HiveDialect}; +use sqlparser::dialect::{GenericDialect, HiveDialect, MsSqlDialect}; use sqlparser::parser::ParserError; use sqlparser::test_utils::*; @@ -91,7 +91,7 @@ fn parse_msck() { } #[test] -fn parse_set_hivevar() { +fn parse_set() { let set = "SET HIVEVAR:name = a, b, c_d"; hive().verified_stmt(set); } @@ -133,7 +133,9 @@ fn create_table_with_comment() { Statement::CreateTable(CreateTable { comment, .. 
}) => { assert_eq!( comment, - Some(CommentDef::WithoutEq("table comment".to_string())) + Some(CommentDef::AfterColumnDefsWithoutEq( + "table comment".to_string() + )) ) } _ => unreachable!(), @@ -168,18 +170,14 @@ fn create_table_with_clustered_by() { sorted_by: Some(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("a")), - options: OrderByOptions { - asc: Some(true), - nulls_first: None, - }, + asc: Some(true), + nulls_first: None, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("b")), - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, + asc: Some(false), + nulls_first: None, with_fill: None, }, ]), @@ -341,9 +339,6 @@ fn lateral_view() { fn sort_by() { let sort_by = "SELECT * FROM db.table SORT BY a"; hive().verified_stmt(sort_by); - - let sort_by_with_direction = "SELECT * FROM db.table SORT BY a, b DESC"; - hive().verified_stmt(sort_by_with_direction); } #[test] @@ -369,20 +364,20 @@ fn from_cte() { fn set_statement_with_minus() { assert_eq!( hive().verified_stmt("SET hive.tez.java.opts = -Xmx4g"), - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![ + variables: OneOrManyWithParens::One(ObjectName(vec![ Ident::new("hive"), Ident::new("tez"), Ident::new("java"), Ident::new("opts") - ]), - values: vec![Expr::UnaryOp { + ])), + value: vec![Expr::UnaryOp { op: UnaryOperator::Minus, expr: Box::new(Expr::Identifier(Ident::new("Xmx4g"))) }], - }) + } ); assert_eq!( @@ -397,20 +392,19 @@ fn set_statement_with_minus() { fn parse_create_function() { let sql = "CREATE TEMPORARY FUNCTION mydb.myfunc AS 'org.random.class.Name' USING JAR 'hdfs://somewhere.com:8020/very/far'"; match hive().verified_stmt(sql) { - Statement::CreateFunction(CreateFunction { + Statement::CreateFunction { temporary, name, function_body, using, .. 
- }) => { + } => { assert!(temporary); assert_eq!(name.to_string(), "mydb.myfunc"); assert_eq!( function_body, Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::SingleQuotedString("org.random.class.Name".to_string())) - .with_empty_span() + Value::SingleQuotedString("org.random.class.Name".to_string()) ))) ); assert_eq!( @@ -424,7 +418,7 @@ fn parse_create_function() { } // Test error in dialect that doesn't support parsing CREATE FUNCTION - let unsupported_dialects = TestedDialects::new(vec![Box::new(AnsiDialect {})]); + let unsupported_dialects = TestedDialects::new(vec![Box::new(MsSqlDialect {})]); assert_eq!( unsupported_dialects.parse_sql_statements(sql).unwrap_err(), @@ -463,14 +457,8 @@ fn parse_delimited_identifiers() { version, with_ordinality: _, partitions: _, - json_path: _, - sample: _, - index_hints: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -489,8 +477,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -524,16 +511,16 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - hive().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + hive().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for &quote in &quote_styles { // Test single identifier with different type of quotes assert_eq!( - hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -547,15 +534,6 @@ fn parse_use() { ); } -#[test] -fn test_tample_sample() { - hive().verified_stmt("SELECT * FROM source TABLESAMPLE (BUCKET 3 OUT OF 32 ON rand()) AS s"); - hive().verified_stmt("SELECT * FROM source TABLESAMPLE (BUCKET 3 OUT OF 16 ON id)"); - hive().verified_stmt("SELECT * FROM source TABLESAMPLE (100M) AS s"); - hive().verified_stmt("SELECT * FROM source TABLESAMPLE (0.1 PERCENT) AS s"); - hive().verified_stmt("SELECT * FROM source TABLESAMPLE (10 ROWS)"); -} - fn hive() -> TestedDialects { TestedDialects::new(vec![Box::new(HiveDialect {})]) } diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index 50c6448d..c5f43b07 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -22,17 +22,14 @@ #[macro_use] mod test_utils; -use helpers::attached_token::AttachedToken; -use sqlparser::keywords::Keyword; -use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan, Word}; use test_utils::*; -use sqlparser::ast::DataType::{Int, Text, Varbinary}; +use sqlparser::ast::DataType::{Int, Text}; use sqlparser::ast::DeclareAssignment::MsSqlAssignment; use sqlparser::ast::Value::SingleQuotedString; use sqlparser::ast::*; use sqlparser::dialect::{GenericDialect, MsSqlDialect}; -use
sqlparser::parser::{Parser, ParserError, ParserOptions}; +use sqlparser::parser::{Parser, ParserError}; #[test] fn parse_mssql_identifiers() { @@ -64,18 +61,15 @@ fn parse_table_time_travel() { select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t1")]), + name: ObjectName(vec![Ident::new("t1")]), alias: None, args: None, with_hints: vec![], version: Some(TableVersion::ForSystemTimeAsOf(Expr::Value( - (Value::SingleQuotedString(version)).with_empty_span() + Value::SingleQuotedString(version) ))), partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![] }, joins: vec![] },] @@ -100,325 +94,87 @@ fn parse_mssql_delimited_identifiers() { #[test] fn parse_create_procedure() { - let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1; END"; + let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1 END"; assert_eq!( ms().verified_stmt(sql), Statement::CreateProcedure { or_alter: true, - body: ConditionalStatements::BeginEnd(BeginEndStatements { - begin_token: AttachedToken::empty(), - statements: vec![Statement::Query(Box::new(Query { - with: None, - limit_clause: None, - fetch: None, - locks: vec![], - for_clause: None, - order_by: None, - settings: None, - format_clause: None, - pipe_operators: vec![], - body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, - top: None, - top_before_distinct: false, - projection: vec![SelectItem::UnnamedExpr(Expr::Value( - (number("1")).with_empty_span() - ))], - exclude: None, - into: None, - from: vec![], - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - window_before_qualify: false, - qualify: None, - value_table_mode: None, - connect_by: None, - flavor: SelectFlavor::Standard, - }))) - }))], - end_token: AttachedToken::empty(), - }), + body: vec![Statement::Query(Box::new(Query { + with: None, + limit: None, + limit_by: vec![], + offset: None, + fetch: None, + locks: vec![], + for_clause: None, + order_by: None, + settings: None, + format_clause: None, + body: Box::new(SetExpr::Select(Box::new(Select { + distinct: None, + top: None, + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))], + into: None, + from: vec![], + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + window_before_qualify: false, + qualify: None, + value_table_mode: None, + connect_by: None, + }))) + }))], params: Some(vec![ ProcedureParam { name: Ident { value: "@foo".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, - data_type: DataType::Int(None), - mode: None, + data_type: DataType::Int(None) }, ProcedureParam { name: Ident { value: "@bar".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, data_type: DataType::Varchar(Some(CharacterLength::IntegerLength { length: 256, unit: None - })), - mode: None, + })) } ]), - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test".into(), - quote_style: None, - span: Span::empty(), - }]), - language: None, + quote_style: None + }]) } ) } #[test] fn 
parse_mssql_create_procedure() { - let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS SELECT 1;"); - let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1; END"); - let _ = ms_and_generic().verified_stmt("CREATE PROCEDURE foo AS BEGIN SELECT 1; END"); + let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1 END"); + let _ = ms_and_generic().verified_stmt("CREATE PROCEDURE foo AS BEGIN SELECT 1 END"); let _ = ms().verified_stmt( - "CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable]; END", + "CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable] END", ); let _ = ms_and_generic().verified_stmt( - "CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV; END", + "CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV END", ); - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; END"); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test' END"); // Test a statement with END in it - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo]; END"); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo] END"); // Multiple statements - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10; END"); -} - -#[test] -fn parse_create_function() { - let return_expression_function = "CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) RETURNS INT AS BEGIN RETURN 1; END"; - assert_eq!( - ms().verified_stmt(return_expression_function), - sqlparser::ast::Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: false, - temporary: false, - if_not_exists: false, - name: ObjectName::from(vec![Ident::new("some_scalar_udf")]), - args: Some(vec![ - OperateFunctionArg { - mode: None, - name: Some(Ident::new("@foo")), - data_type: DataType::Int(None), - default_expr: None, - }, - OperateFunctionArg { - mode: None, - name: Some(Ident::new("@bar")), - data_type: DataType::Varchar(Some(CharacterLength::IntegerLength { - length: 256, - unit: None - })), - default_expr: None, - }, - ]), - return_type: Some(DataType::Int(None)), - function_body: Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { - begin_token: AttachedToken::empty(), - statements: vec![Statement::Return(ReturnStatement { - value: Some(ReturnStatementValue::Expr(Expr::Value( - (number("1")).with_empty_span() - ))), - })], - end_token: AttachedToken::empty(), - })), - behavior: None, - called_on_null: None, - parallel: None, - using: None, - language: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }), - ); - - let multi_statement_function = "\ - CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ - RETURNS INT \ - AS \ - BEGIN \ - SET @foo = @foo + 1; \ - RETURN @foo; \ - END\ - "; - let _ = ms().verified_stmt(multi_statement_function); - - let multi_statement_function_without_as = multi_statement_function.replace(" AS", ""); - let _ = ms().one_statement_parses_to( - &multi_statement_function_without_as, - multi_statement_function, - ); - - let create_function_with_conditional = "\ - CREATE FUNCTION some_scalar_udf() \ - RETURNS INT \ - AS \ - BEGIN \ - IF 1 = 2 \ - BEGIN \ - RETURN 1; \ - END; \ 
- RETURN 0; \ - END\ - "; - let _ = ms().verified_stmt(create_function_with_conditional); - - let create_or_alter_function = "\ - CREATE OR ALTER FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ - RETURNS INT \ - AS \ - BEGIN \ - SET @foo = @foo + 1; \ - RETURN @foo; \ - END\ - "; - let _ = ms().verified_stmt(create_or_alter_function); - - let create_function_with_return_expression = "\ - CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ - RETURNS INT \ - AS \ - BEGIN \ - RETURN CONVERT(INT, 1) + 2; \ - END\ - "; - let _ = ms().verified_stmt(create_function_with_return_expression); - - let create_inline_table_value_function = "\ - CREATE FUNCTION some_inline_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS TABLE \ - AS \ - RETURN (SELECT 1 AS col_1)\ - "; - let _ = ms().verified_stmt(create_inline_table_value_function); - - let create_inline_table_value_function_without_parentheses = "\ - CREATE FUNCTION some_inline_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS TABLE \ - AS \ - RETURN SELECT 1 AS col_1\ - "; - let _ = ms().verified_stmt(create_inline_table_value_function_without_parentheses); - - let create_inline_table_value_function_without_as = - create_inline_table_value_function.replace(" AS", ""); - let _ = ms().one_statement_parses_to( - &create_inline_table_value_function_without_as, - create_inline_table_value_function, - ); - - let create_multi_statement_table_value_function = "\ - CREATE FUNCTION some_multi_statement_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS @t TABLE (col_1 INT) \ - AS \ - BEGIN \ - INSERT INTO @t SELECT 1; \ - RETURN; \ - END\ - "; - let _ = ms().verified_stmt(create_multi_statement_table_value_function); - - let create_multi_statement_table_value_function_without_as = - create_multi_statement_table_value_function.replace(" AS", ""); - let _ = ms().one_statement_parses_to( - &create_multi_statement_table_value_function_without_as, - create_multi_statement_table_value_function, - ); - - let create_multi_statement_table_value_function_with_constraints = "\ - CREATE FUNCTION some_multi_statement_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS @t TABLE (col_1 INT NOT NULL) \ - AS \ - BEGIN \ - INSERT INTO @t SELECT 1; \ - RETURN @t; \ - END\ - "; - let _ = ms().verified_stmt(create_multi_statement_table_value_function_with_constraints); - - let create_multi_statement_tvf_without_table_definition = "\ - CREATE FUNCTION incorrect_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS @t TABLE () - AS \ - BEGIN \ - INSERT INTO @t SELECT 1; \ - RETURN @t; \ - END\ - "; - assert_eq!( - ParserError::ParserError("Unparsable function body".to_owned()), - ms().parse_sql_statements(create_multi_statement_tvf_without_table_definition) - .unwrap_err() - ); - - let create_inline_tvf_without_subquery_or_bare_select = "\ - CREATE FUNCTION incorrect_tvf(@foo INT, @bar VARCHAR(256)) \ - RETURNS TABLE - AS \ - RETURN 'hi'\ - "; - assert_eq!( - ParserError::ParserError( - "Expected a subquery (or bare SELECT statement) after RETURN".to_owned() - ), - ms().parse_sql_statements(create_inline_tvf_without_subquery_or_bare_select) - .unwrap_err() - ); -} - -#[test] -fn parse_create_function_parameter_default_values() { - let single_default_sql = - "CREATE FUNCTION test_func(@param1 INT = 42) RETURNS INT AS BEGIN RETURN @param1; END"; - assert_eq!( - ms().verified_stmt(single_default_sql), - Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: false, - temporary: false, - if_not_exists: false, - name: ObjectName::from(vec![Ident::new("test_func")]), - args: 
Some(vec![OperateFunctionArg { - mode: None, - name: Some(Ident::new("@param1")), - data_type: DataType::Int(None), - default_expr: Some(Expr::Value((number("42")).with_empty_span())), - },]), - return_type: Some(DataType::Int(None)), - function_body: Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { - begin_token: AttachedToken::empty(), - statements: vec![Statement::Return(ReturnStatement { - value: Some(ReturnStatementValue::Expr(Expr::Identifier(Ident::new( - "@param1" - )))), - })], - end_token: AttachedToken::empty(), - })), - behavior: None, - called_on_null: None, - parallel: None, - using: None, - language: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }), - ); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10 END"); } #[test] @@ -437,279 +193,13 @@ fn parse_mssql_apply_join() { ); } -#[test] -fn parse_mssql_openjson() { - let select = ms().verified_only_select( - "SELECT B.kind, B.id_list \ - FROM t_test_table AS A \ - CROSS APPLY OPENJSON(A.param, '$.config') WITH (kind VARCHAR(20) '$.kind', [id_list] NVARCHAR(MAX) '$.id_list' AS JSON) AS B", - ); - assert_eq!( - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t_test_table")]), - alias: Some(TableAlias { - name: Ident::new("A"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - with_ordinality: false, - partitions: vec![], - json_path: None, - sample: None, - index_hints: vec![] - }, - joins: vec![Join { - relation: TableFactor::OpenJsonTable { - json_expr: Expr::CompoundIdentifier( - vec![Ident::new("A"), Ident::new("param"),] - ), - json_path: Some(Value::SingleQuotedString("$.config".into())), - columns: vec![ - OpenJsonTableColumn { - name: Ident::new("kind"), - r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { - length: 20, - unit: None - })), - path: Some("$.kind".into()), - as_json: false - }, - OpenJsonTableColumn { - name: Ident { - value: "id_list".into(), - quote_style: Some('['), - span: Span::empty(), - }, - r#type: DataType::Nvarchar(Some(CharacterLength::Max)), - path: Some("$.id_list".into()), - as_json: true - } - ], - alias: Some(TableAlias { - name: Ident::new("B"), - columns: vec![] - }) - }, - global: false, - join_operator: JoinOperator::CrossApply - }] - }], - select.from - ); - let select = ms().verified_only_select( - "SELECT B.kind, B.id_list \ - FROM t_test_table AS A \ - CROSS APPLY OPENJSON(A.param) WITH (kind VARCHAR(20) '$.kind', [id_list] NVARCHAR(MAX) '$.id_list' AS JSON) AS B", - ); - assert_eq!( - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t_test_table"),]), - alias: Some(TableAlias { - name: Ident::new("A"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - with_ordinality: false, - partitions: vec![], - json_path: None, - sample: None, - index_hints: vec![] - }, - joins: vec![Join { - relation: TableFactor::OpenJsonTable { - json_expr: Expr::CompoundIdentifier( - vec![Ident::new("A"), Ident::new("param"),] - ), - json_path: None, - columns: vec![ - OpenJsonTableColumn { - name: Ident::new("kind"), - r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { - length: 20, - unit: None - })), - path: Some("$.kind".into()), - as_json: false - }, - OpenJsonTableColumn { - name: Ident { - value: "id_list".into(), - quote_style: Some('['), - span: Span::empty(), - }, - r#type: 
DataType::Nvarchar(Some(CharacterLength::Max)), - path: Some("$.id_list".into()), - as_json: true - } - ], - alias: Some(TableAlias { - name: Ident::new("B"), - columns: vec![] - }) - }, - global: false, - join_operator: JoinOperator::CrossApply - }] - }], - select.from - ); - let select = ms().verified_only_select( - "SELECT B.kind, B.id_list \ - FROM t_test_table AS A \ - CROSS APPLY OPENJSON(A.param) WITH (kind VARCHAR(20), [id_list] NVARCHAR(MAX)) AS B", - ); - assert_eq!( - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t_test_table")]), - alias: Some(TableAlias { - name: Ident::new("A"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - with_ordinality: false, - partitions: vec![], - json_path: None, - sample: None, - index_hints: vec![] - }, - joins: vec![Join { - relation: TableFactor::OpenJsonTable { - json_expr: Expr::CompoundIdentifier( - vec![Ident::new("A"), Ident::new("param"),] - ), - json_path: None, - columns: vec![ - OpenJsonTableColumn { - name: Ident::new("kind"), - r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { - length: 20, - unit: None - })), - path: None, - as_json: false - }, - OpenJsonTableColumn { - name: Ident { - value: "id_list".into(), - quote_style: Some('['), - span: Span::empty(), - }, - r#type: DataType::Nvarchar(Some(CharacterLength::Max)), - path: None, - as_json: false - } - ], - alias: Some(TableAlias { - name: Ident::new("B"), - columns: vec![] - }) - }, - global: false, - join_operator: JoinOperator::CrossApply - }] - }], - select.from - ); - let select = ms_and_generic().verified_only_select( - "SELECT B.kind, B.id_list \ - FROM t_test_table AS A \ - CROSS APPLY OPENJSON(A.param, '$.config') AS B", - ); - assert_eq!( - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t_test_table")]), - alias: Some(TableAlias { - name: Ident::new("A"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - with_ordinality: false, - partitions: vec![], - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::OpenJsonTable { - json_expr: Expr::CompoundIdentifier( - vec![Ident::new("A"), Ident::new("param"),] - ), - json_path: Some(Value::SingleQuotedString("$.config".into())), - columns: vec![], - alias: Some(TableAlias { - name: Ident::new("B"), - columns: vec![] - }) - }, - global: false, - join_operator: JoinOperator::CrossApply - }] - }], - select.from - ); - let select = ms_and_generic().verified_only_select( - "SELECT B.kind, B.id_list \ - FROM t_test_table AS A \ - CROSS APPLY OPENJSON(A.param) AS B", - ); - assert_eq!( - vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("t_test_table")]), - alias: Some(TableAlias { - name: Ident::new("A"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - with_ordinality: false, - partitions: vec![], - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::OpenJsonTable { - json_expr: Expr::CompoundIdentifier( - vec![Ident::new("A"), Ident::new("param"),] - ), - json_path: None, - columns: vec![], - alias: Some(TableAlias { - name: Ident::new("B"), - columns: vec![] - }) - }, - global: false, - join_operator: JoinOperator::CrossApply - }] - }], - select.from - ); -} - #[test] fn parse_mssql_top_paren() { let sql = "SELECT TOP (5) * FROM foo"; let select = 
ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value( - (number("5")).with_empty_span() - ))), + Some(TopQuantity::Expr(Expr::Value(number("5")))), top.quantity ); assert!(!top.percent); @@ -721,9 +211,7 @@ fn parse_mssql_top_percent() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value( - (number("5")).with_empty_span() - ))), + Some(TopQuantity::Expr(Expr::Value(number("5")))), top.quantity ); assert!(top.percent); @@ -735,9 +223,7 @@ fn parse_mssql_top_with_ties() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value( - (number("5")).with_empty_span() - ))), + Some(TopQuantity::Expr(Expr::Value(number("5")))), top.quantity ); assert!(top.with_ties); @@ -749,9 +235,7 @@ fn parse_mssql_top_percent_with_ties() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value( - (number("10")).with_empty_span() - ))), + Some(TopQuantity::Expr(Expr::Value(number("10")))), top.quantity ); assert!(top.percent); @@ -780,10 +264,9 @@ fn parse_mssql_create_role() { assert_eq_vec(&["mssql"], &names); assert_eq!( authorization_owner, - Some(ObjectName::from(vec![Ident { + Some(ObjectName(vec![Ident { value: "helena".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])) ); } @@ -799,14 +282,12 @@ fn parse_alter_role() { [Statement::AlterRole { name: Ident { value: "old_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::RenameRole { role_name: Ident { value: "new_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None } }, }] @@ -818,14 +299,12 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::AddMember { member_name: Ident { value: "new_member".into(), - quote_style: None, - span: Span::empty(), + quote_style: None } }, } @@ -837,14 +316,12 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::DropMember { member_name: Ident { value: "old_member".into(), - quote_style: None, - span: Span::empty(), + quote_style: None } }, } @@ -865,12 +342,10 @@ fn parse_delimited_identifiers() { args, with_hints, version, - .. + with_ordinality: _, + partitions: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -889,8 +364,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -922,11 +396,11 @@ fn parse_table_name_in_square_brackets() { let select = ms().verified_only_select(r#"SELECT [a column] FROM [a schema].[a table]"#); if let TableFactor::Table { name, .. 
} = only(select.from).relation { assert_eq!( - ObjectName::from(vec![ + vec![ Ident::with_quote('[', "a schema"), Ident::with_quote('[', "a table") - ]), - name + ], + name.0 ); } else { panic!("Expecting TableFactor::Table"); @@ -975,317 +449,6 @@ fn parse_for_json_expect_ast() { ); } -#[test] -fn parse_mssql_json_object() { - let select = ms().verified_only_select( - "SELECT JSON_OBJECT('user_name' : USER_NAME(), LOWER(@id_key) : @id_value, 'sid' : (SELECT @@SPID) ABSENT ON NULL)", - ); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert!(matches!( - args[0], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::Function(_)), - operator: FunctionArgOperator::Colon - } - )); - assert!(matches!( - args[1], - FunctionArg::ExprNamed { - name: Expr::Function(_), - arg: FunctionArgExpr::Expr(Expr::Identifier(_)), - operator: FunctionArgOperator::Colon - } - )); - assert!(matches!( - args[2], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::Subquery(_)), - operator: FunctionArgOperator::Colon - } - )); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select( - "SELECT s.session_id, JSON_OBJECT('security_id' : s.security_id, 'login' : s.login_name, 'status' : s.status) AS info \ - FROM sys.dm_exec_sessions AS s \ - WHERE s.is_user_process = 1", - ); - match &select.projection[1] { - SelectItem::ExprWithAlias { - expr: - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, .. }), - .. - }), - .. - } => { - assert!(matches!( - args[0], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), - operator: FunctionArgOperator::Colon - } - )); - assert!(matches!( - args[1], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), - operator: FunctionArgOperator::Colon - } - )); - assert!(matches!( - args[2], - FunctionArg::ExprNamed { - name: Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(_), - span: _ - }), - arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), - operator: FunctionArgOperator::Colon - } - )); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_mssql_json_array() { - let select = ms().verified_only_select("SELECT JSON_ARRAY('a', 1, NULL, 2 NULL ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert_eq!( - &[ - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("1")).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::Null).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("2")).with_empty_span() - ))), - ], - &args[..] 
- ); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select("SELECT JSON_ARRAY('a', 1, NULL, 2 ABSENT ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert_eq!( - &[ - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("1")).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::Null).with_empty_span() - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("2")).with_empty_span() - ))), - ], - &args[..] - ); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select("SELECT JSON_ARRAY(NULL ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert!(args.is_empty()); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select("SELECT JSON_ARRAY(ABSENT ON NULL)"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert!(args.is_empty()); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::AbsentOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select( - "SELECT JSON_ARRAY('a', JSON_OBJECT('name' : 'value', 'type' : 1) NULL ON NULL)", - ); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }) => { - assert_eq!( - &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - ))), - &args[0] - ); - assert!(matches!( - args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) - )); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } - let select = ms().verified_only_select( - "SELECT JSON_ARRAY('a', JSON_OBJECT('name' : 'value', 'type' : 1), JSON_ARRAY(1, NULL, 2 NULL ON NULL))", - ); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, .. }), - .. - }) => { - assert_eq!( - &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (Value::SingleQuotedString("a".into())).with_empty_span() - ))), - &args[0] - ); - assert!(matches!( - args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) - )); - assert!(matches!( - args[2], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) - )); - } - _ => unreachable!(), - } - let select = ms().verified_only_select("SELECT JSON_ARRAY(1, @id_value, (SELECT @@SPID))"); - match expr_from_projection(&select.projection[0]) { - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, .. }), - .. 
- }) => { - assert_eq!( - &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - (number("1")).with_empty_span() - ))), - &args[0] - ); - assert!(matches!( - args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier(_))) - )); - assert!(matches!( - args[2], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Subquery(_))) - )); - } - _ => unreachable!(), - } - let select = ms().verified_only_select( - "SELECT s.session_id, JSON_ARRAY(s.host_name, s.program_name, s.client_interface_name NULL ON NULL) AS info \ - FROM sys.dm_exec_sessions AS s \ - WHERE s.is_user_process = 1", - ); - match &select.projection[1] { - SelectItem::ExprWithAlias { - expr: - Expr::Function(Function { - args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), - .. - }), - .. - } => { - assert!(matches!( - args[0], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) - )); - assert!(matches!( - args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) - )); - assert!(matches!( - args[2], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) - )); - assert_eq!( - &[FunctionArgumentClause::JsonNullClause( - JsonNullClause::NullOnNull - )], - &clauses[..] - ); - } - _ => unreachable!(), - } -} - #[test] fn parse_ampersand_arobase() { // In SQL Server, a&@b means (a) & (@b), in PostgreSQL it means (a) &@ (b) @@ -1313,15 +476,15 @@ fn parse_convert() { unreachable!() }; assert!(!is_try); - assert_eq!(Expr::value(number("1")), *expr); + assert_eq!(Expr::Value(number("1")), *expr); assert_eq!(Some(DataType::Int(None)), data_type); assert!(charset.is_none()); assert!(target_before_value); assert_eq!( vec![ - Expr::value(number("2")), - Expr::value(number("3")), - Expr::Value((Value::Null).with_empty_span()), + Expr::Value(number("2")), + Expr::Value(number("3")), + Expr::Value(Value::Null), ], styles ); @@ -1350,33 +513,32 @@ fn parse_substring_in_select() { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: Some(Distinct::Distinct), top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Substring { expr: Box::new(Expr::Identifier(Ident { value: "description".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None })), - substring_from: Some(Box::new(Expr::Value( - (number("0")).with_empty_span() - ))), - substring_for: Some(Box::new(Expr::Value( - (number("1")).with_empty_span() - ))), + substring_from: Some(Box::new(Expr::Value(number("0")))), + substring_for: Some(Box::new(Expr::Value(number("1")))), special: true, - shorthand: false, })], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "test".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "test".to_string(), + quote_style: None + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![] }], lateral_views: vec![], @@ -1392,16 +554,16 @@ fn parse_substring_in_select() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }), query ); @@ -1413,7 +575,7 @@ fn 
parse_substring_in_select() { #[test] fn parse_mssql_declare() { let sql = "DECLARE @foo CURSOR, @bar INT, @baz AS TEXT = 'foobar';"; - let ast = ms().parse_sql_statements(sql).unwrap(); + let ast = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap(); assert_eq!( vec![Statement::Declare { @@ -1421,8 +583,7 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@foo".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }], data_type: None, assignment: None, @@ -1436,8 +597,7 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@bar".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }], data_type: Some(Int(None)), assignment: None, @@ -1451,13 +611,12 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@baz".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }], data_type: Some(Text), - assignment: Some(MsSqlAssignment(Box::new(Expr::Value( - (SingleQuotedString("foobar".to_string())).with_empty_span() - )))), + assignment: Some(MsSqlAssignment(Box::new(Expr::Value(SingleQuotedString( + "foobar".to_string() + ))))), declare_type: None, binary: None, sensitive: None, @@ -1469,197 +628,6 @@ fn parse_mssql_declare() { }], ast ); - - let sql = "DECLARE @bar INT;SET @bar = 2;SELECT @bar * 4"; - let ast = ms().parse_sql_statements(sql).unwrap(); - assert_eq!( - vec![ - Statement::Declare { - stmts: vec![Declare { - names: vec![Ident::new("@bar"),], - data_type: Some(Int(None)), - assignment: None, - declare_type: None, - binary: None, - sensitive: None, - scroll: None, - hold: None, - for_query: None - }] - }, - Statement::Set(Set::SingleAssignment { - scope: None, - hivevar: false, - variable: ObjectName::from(vec![Ident::new("@bar")]), - values: vec![Expr::Value( - (Value::Number("2".parse().unwrap(), false)).with_empty_span() - )], - }), - Statement::Query(Box::new(Query { - with: None, - limit_clause: None, - fetch: None, - locks: vec![], - for_clause: None, - order_by: None, - settings: None, - format_clause: None, - pipe_operators: vec![], - - body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, - top: None, - top_before_distinct: false, - projection: vec![SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("@bar"))), - op: BinaryOperator::Multiply, - right: Box::new(Expr::Value( - (Value::Number("4".parse().unwrap(), false)).with_empty_span() - )), - })], - exclude: None, - into: None, - from: vec![], - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - window_before_qualify: false, - qualify: None, - value_table_mode: None, - connect_by: None, - flavor: SelectFlavor::Standard, - }))) - })) - ], - ast - ); - - let declare_cursor_for_select = - "DECLARE vend_cursor CURSOR FOR SELECT * FROM Purchasing.Vendor"; - let _ = ms().verified_stmt(declare_cursor_for_select); -} - -#[test] -fn test_mssql_cursor() { - let full_cursor_usage = "\ - DECLARE Employee_Cursor CURSOR FOR \ - SELECT LastName, FirstName \ - FROM AdventureWorks2022.HumanResources.vEmployee \ - WHERE LastName LIKE 'B%'; \ - \ - OPEN Employee_Cursor; \ - \ - FETCH NEXT FROM Employee_Cursor; \ - \ - WHILE @@FETCH_STATUS = 0 \ - BEGIN \ - FETCH NEXT FROM Employee_Cursor; \ - END; \ - \ - CLOSE Employee_Cursor; \ - DEALLOCATE Employee_Cursor\ - "; - let _ = 
ms().statements_parse_to(full_cursor_usage, ""); -} - -#[test] -fn test_mssql_while_statement() { - let while_single_statement = "WHILE 1 = 0 PRINT 'Hello World';"; - let stmt = ms().verified_stmt(while_single_statement); - assert_eq!( - stmt, - Statement::While(sqlparser::ast::WhileStatement { - while_block: ConditionalStatementBlock { - start_token: AttachedToken(TokenWithSpan { - token: Token::Word(Word { - value: "WHILE".to_string(), - quote_style: None, - keyword: Keyword::WHILE - }), - span: Span::empty() - }), - condition: Some(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::Number("0".parse().unwrap(), false)).with_empty_span() - )), - }), - then_token: None, - conditional_statements: ConditionalStatements::Sequence { - statements: vec![Statement::Print(PrintStatement { - message: Box::new(Expr::Value( - (Value::SingleQuotedString("Hello World".to_string())) - .with_empty_span() - )), - })], - } - } - }) - ); - - let while_begin_end = "\ - WHILE @@FETCH_STATUS = 0 \ - BEGIN \ - FETCH NEXT FROM Employee_Cursor; \ - END\ - "; - let _ = ms().verified_stmt(while_begin_end); - - let while_begin_end_multiple_statements = "\ - WHILE @@FETCH_STATUS = 0 \ - BEGIN \ - FETCH NEXT FROM Employee_Cursor; \ - PRINT 'Hello World'; \ - END\ - "; - let _ = ms().verified_stmt(while_begin_end_multiple_statements); -} - -#[test] -fn test_parse_raiserror() { - let sql = r#"RAISERROR('This is a test', 16, 1)"#; - let s = ms().verified_stmt(sql); - assert_eq!( - s, - Statement::RaisError { - message: Box::new(Expr::Value( - (Value::SingleQuotedString("This is a test".to_string())).with_empty_span() - )), - severity: Box::new(Expr::Value( - (Value::Number("16".parse().unwrap(), false)).with_empty_span() - )), - state: Box::new(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), - arguments: vec![], - options: vec![], - } - ); - - let sql = r#"RAISERROR('This is a test', 16, 1) WITH NOWAIT"#; - let _ = ms().verified_stmt(sql); - - let sql = r#"RAISERROR('This is a test', 16, 1, 'ARG') WITH SETERROR, LOG"#; - let _ = ms().verified_stmt(sql); - - let sql = r#"RAISERROR(N'This is message %s %d.', 10, 1, N'number', 5)"#; - let _ = ms().verified_stmt(sql); - - let sql = r#"RAISERROR(N'<<%*.*s>>', 10, 1, 7, 3, N'abcde')"#; - let _ = ms().verified_stmt(sql); - - let sql = r#"RAISERROR(@ErrorMessage, @ErrorSeverity, @ErrorState)"#; - let _ = ms().verified_stmt(sql); } #[test] @@ -1676,16 +644,16 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - ms().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + ms().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for &quote in &quote_styles { // Test single identifier with different type of quotes assert_eq!( - ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -1704,18 +672,16 @@ fn parse_create_table_with_valid_options() { key: Ident { value: "DISTRIBUTION".to_string(), quote_style: None, - span: Span::empty(), }, value: Expr::Identifier(Ident {
value: "ROUND_ROBIN".to_string(), quote_style: None, - span: Span::empty(), }) }, SqlOption::Partition { column_name: "column_a".into(), range_direction: None, - for_values: vec![Expr::Value((test_utils::number("10")).with_empty_span()), Expr::Value((test_utils::number("11")).with_empty_span())] , + for_values: vec![Expr::Value(test_utils::number("10")), Expr::Value(test_utils::number("11"))] , }, ], ), @@ -1726,8 +692,8 @@ fn parse_create_table_with_valid_options() { column_name: "column_a".into(), range_direction: Some(PartitionRangeDirection::Left), for_values: vec![ - Expr::Value((test_utils::number("10")).with_empty_span()), - Expr::Value((test_utils::number("11")).with_empty_span()), + Expr::Value(test_utils::number("10")), + Expr::Value(test_utils::number("11")), ], } ], @@ -1753,7 +719,6 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_a".to_string(), quote_style: None, - span: Span::empty(), }, asc: Some(true), }, @@ -1761,7 +726,6 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_b".to_string(), quote_style: None, - span: Span::empty(), }, asc: Some(false), }, @@ -1769,7 +733,6 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_c".to_string(), quote_style: None, - span: Span::empty(), }, asc: None, }, @@ -1783,20 +746,17 @@ fn parse_create_table_with_valid_options() { key: Ident { value: "DISTRIBUTION".to_string(), quote_style: None, - span: Span::empty(), }, value: Expr::Function( Function { - name: ObjectName::from( + name: ObjectName( vec![ Ident { value: "HASH".to_string(), quote_style: None, - span: Span::empty(), }, ], ), - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List( FunctionArgumentList { @@ -1808,7 +768,6 @@ fn parse_create_table_with_valid_options() { Ident { value: "column_a".to_string(), quote_style: None, - span: Span::empty(), }, ), ), @@ -1819,7 +778,6 @@ fn parse_create_table_with_valid_options() { Ident { value: "column_b".to_string(), quote_style: None, - span: Span::empty(), }, ), ), @@ -1851,37 +809,36 @@ fn parse_create_table_with_valid_options() { if_not_exists: false, transient: false, volatile: false, - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "mytable".to_string(), quote_style: None, - span: Span::empty(), },],), columns: vec![ ColumnDef { name: Ident { value: "column_a".to_string(), quote_style: None, - span: Span::empty(), }, data_type: Int(None,), + collation: None, options: vec![], }, ColumnDef { name: Ident { value: "column_b".to_string(), quote_style: None, - span: Span::empty(), }, data_type: Int(None,), + collation: None, options: vec![], }, ColumnDef { name: Ident { value: "column_c".to_string(), quote_style: None, - span: Span::empty(), }, data_type: Int(None,), + collation: None, options: vec![], }, ], @@ -1893,13 +850,19 @@ fn parse_create_table_with_valid_options() { storage: None, location: None, },), + table_properties: vec![], + with_options, file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, + engine: None, comment: None, + auto_increment_offset: None, + default_charset: None, + collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -1907,9 +870,8 @@ fn parse_create_table_with_valid_options() { partition_by: None, cluster_by: None, clustered_by: None, - inherits: None, + options: None, strict: false, - iceberg: false, copy_grants: false, enable_schema_evolution: None, change_tracking: None, @@ -1919,33 +881,11 @@ fn 
parse_create_table_with_valid_options() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, - base_location: None, - external_volume: None, - catalog: None, - catalog_sync: None, - storage_serialization_policy: None, - table_options: CreateTableOptions::With(with_options) }) ); } } -#[test] -fn parse_nested_slash_star_comment() { - let sql = r#" - select - /* - comment level 1 - /* - comment level 2 - */ - */ - 1; - "#; - let canonical = "SELECT 1"; - ms().one_statement_parses_to(sql, canonical); -} - #[test] fn parse_create_table_with_invalid_options() { let invalid_cases = vec![ @@ -2007,8 +947,8 @@ fn parse_create_table_with_identity_column() { IdentityProperty { parameters: Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { - seed: Expr::value(number("1")), - increment: Expr::value(number("1")), + seed: Expr::Value(number("1")), + increment: Expr::Value(number("1")), }, )), order: None, @@ -2034,20 +974,17 @@ fn parse_create_table_with_identity_column() { if_not_exists: false, transient: false, volatile: false, - iceberg: false, - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "mytable".to_string(), quote_style: None, - span: Span::empty(), },],), columns: vec![ColumnDef { name: Ident { value: "columnA".to_string(), quote_style: None, - span: Span::empty(), }, data_type: Int(None,), - + collation: None, options: column_options, },], constraints: vec![], @@ -2058,13 +995,19 @@ fn parse_create_table_with_identity_column() { storage: None, location: None, },), + table_properties: vec![], + with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, + engine: None, comment: None, + auto_increment_offset: None, + default_charset: None, + collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -2072,7 +1015,7 @@ fn parse_create_table_with_identity_column() { partition_by: None, cluster_by: None, clustered_by: None, - inherits: None, + options: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -2083,427 +1026,14 @@ fn parse_create_table_with_identity_column() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, - base_location: None, - external_volume: None, - catalog: None, - catalog_sync: None, - storage_serialization_policy: None, - table_options: CreateTableOptions::None }), ); } } -#[test] -fn parse_true_false_as_identifiers() { - assert_eq!( - ms().verified_expr("true"), - Expr::Identifier(Ident::new("true")) - ); - assert_eq!( - ms().verified_expr("false"), - Expr::Identifier(Ident::new("false")) - ); -} - -#[test] -fn parse_mssql_set_session_value() { - ms().verified_stmt( - "SET OFFSETS SELECT, FROM, ORDER, TABLE, PROCEDURE, STATEMENT, PARAM, EXECUTE ON", - ); - ms().verified_stmt("SET IDENTITY_INSERT dbo.Tool ON"); - ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); - ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL READ COMMITTED"); - ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"); - ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL SNAPSHOT"); - ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE"); - ms().verified_stmt("SET STATISTICS IO ON"); - ms().verified_stmt("SET STATISTICS XML ON"); - ms().verified_stmt("SET STATISTICS PROFILE ON"); - ms().verified_stmt("SET STATISTICS TIME ON"); - ms().verified_stmt("SET DATEFIRST 7"); - ms().verified_stmt("SET DATEFIRST @xxx"); - ms().verified_stmt("SET DATEFIRST @@xxx"); - 
ms().verified_stmt("SET DATEFORMAT dmy"); - ms().verified_stmt("SET DATEFORMAT @datevar"); - ms().verified_stmt("SET DATEFORMAT @@datevar"); - ms().verified_stmt("SET DEADLOCK_PRIORITY 'LOW'"); - ms().verified_stmt("SET DEADLOCK_PRIORITY LOW"); - ms().verified_stmt("SET DEADLOCK_PRIORITY 8"); - ms().verified_stmt("SET DEADLOCK_PRIORITY -8"); - ms().verified_stmt("SET DEADLOCK_PRIORITY @xxx"); - ms().verified_stmt("SET DEADLOCK_PRIORITY @@xxx"); - ms().verified_stmt("SET LOCK_TIMEOUT 1800"); - ms().verified_stmt("SET CONCAT_NULL_YIELDS_NULL ON"); - ms().verified_stmt("SET CURSOR_CLOSE_ON_COMMIT ON"); - ms().verified_stmt("SET FIPS_FLAGGER 'level'"); - ms().verified_stmt("SET FIPS_FLAGGER OFF"); - ms().verified_stmt("SET LANGUAGE Italian"); - ms().verified_stmt("SET QUOTED_IDENTIFIER ON"); - ms().verified_stmt("SET ARITHABORT ON"); - ms().verified_stmt("SET ARITHIGNORE OFF"); - ms().verified_stmt("SET FMTONLY ON"); - ms().verified_stmt("SET NOCOUNT OFF"); - ms().verified_stmt("SET NOEXEC ON"); - ms().verified_stmt("SET NUMERIC_ROUNDABORT ON"); - ms().verified_stmt("SET QUERY_GOVERNOR_COST_LIMIT 11"); - ms().verified_stmt("SET ROWCOUNT 4"); - ms().verified_stmt("SET ROWCOUNT @xxx"); - ms().verified_stmt("SET ROWCOUNT @@xxx"); - ms().verified_stmt("SET TEXTSIZE 11"); - ms().verified_stmt("SET ANSI_DEFAULTS ON"); - ms().verified_stmt("SET ANSI_NULL_DFLT_OFF ON"); - ms().verified_stmt("SET ANSI_NULL_DFLT_ON ON"); - ms().verified_stmt("SET ANSI_NULLS ON"); - ms().verified_stmt("SET ANSI_PADDING ON"); - ms().verified_stmt("SET ANSI_WARNINGS ON"); - ms().verified_stmt("SET FORCEPLAN ON"); - ms().verified_stmt("SET SHOWPLAN_ALL ON"); - ms().verified_stmt("SET SHOWPLAN_TEXT ON"); - ms().verified_stmt("SET SHOWPLAN_XML ON"); - ms().verified_stmt("SET IMPLICIT_TRANSACTIONS ON"); - ms().verified_stmt("SET REMOTE_PROC_TRANSACTIONS ON"); - ms().verified_stmt("SET XACT_ABORT ON"); - ms().verified_stmt("SET ANSI_NULLS, ANSI_PADDING ON"); -} - -#[test] -fn parse_mssql_if_else() { - // Simple statements and blocks - ms().verified_stmt("IF 1 = 1 SELECT '1'; ELSE SELECT '2';"); - ms().verified_stmt("IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"); - ms().verified_stmt( - "IF DATENAME(weekday, GETDATE()) IN (N'Saturday', N'Sunday') SELECT 'Weekend'; ELSE SELECT 'Weekday';" - ); - ms().verified_stmt( - "IF (SELECT COUNT(*) FROM a.b WHERE c LIKE 'x%') > 1 SELECT 'yes'; ELSE SELECT 'No';", - ); - - // Multiple statements - let stmts = ms() - .parse_sql_statements("DECLARE @A INT; IF 1=1 BEGIN SET @A = 1 END ELSE SET @A = 2") - .unwrap(); - match &stmts[..] { - [Statement::Declare { .. 
}, Statement::If(stmt)] => { - assert_eq!( - stmt.to_string(), - "IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;" - ); - } - _ => panic!("Unexpected statements: {stmts:?}"), - } -} - -#[test] -fn test_mssql_if_else_span() { - let sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); - assert_eq!( - parser.parse_statement().unwrap().span(), - Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) - ); -} - -#[test] -fn test_mssql_if_else_multiline_span() { - let sql_line1 = "IF 1 = 1"; - let sql_line2 = "SELECT '1'"; - let sql_line3 = "ELSE SELECT '2'"; - let sql = [sql_line1, sql_line2, sql_line3].join("\n"); - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); - assert_eq!( - parser.parse_statement().unwrap().span(), - Span::new( - Location::new(1, 1), - Location::new(3, sql_line3.len() as u64 + 1) - ) - ); -} - -#[test] -fn test_mssql_if_statements_span() { - // Simple statements - let mut sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; - let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); - match parser.parse_statement().unwrap() { - Statement::If(IfStatement { - if_block, - else_block: Some(else_block), - .. - }) => { - assert_eq!( - if_block.span(), - Span::new(Location::new(1, 1), Location::new(1, 20)) - ); - assert_eq!( - else_block.span(), - Span::new(Location::new(1, 21), Location::new(1, 36)) - ); - } - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Blocks - sql = "IF 1 = 1 BEGIN SET @A = 1; END ELSE BEGIN SET @A = 2 END"; - parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); - match parser.parse_statement().unwrap() { - Statement::If(IfStatement { - if_block, - else_block: Some(else_block), - .. - }) => { - assert_eq!( - if_block.span(), - Span::new(Location::new(1, 1), Location::new(1, 31)) - ); - assert_eq!( - else_block.span(), - Span::new(Location::new(1, 32), Location::new(1, 57)) - ); - } - stmt => panic!("Unexpected statement: {stmt:?}"), - } -} - -#[test] -fn parse_mssql_varbinary_max_length() { - let sql = "CREATE TABLE example (var_binary_col VARBINARY(MAX))"; - - match ms_and_generic().verified_stmt(sql) { - Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!( - name, - ObjectName::from(vec![Ident { - value: "example".to_string(), - quote_style: None, - span: Span::empty(), - }]) - ); - assert_eq!( - columns, - vec![ColumnDef { - name: Ident::new("var_binary_col"), - data_type: Varbinary(Some(BinaryLength::Max)), - - options: vec![] - },], - ); - } - _ => unreachable!(), - } - - let sql = "CREATE TABLE example (var_binary_col VARBINARY(50))"; - - match ms_and_generic().verified_stmt(sql) { - Statement::CreateTable(CreateTable { name, columns, .. 
}) => { - assert_eq!( - name, - ObjectName::from(vec![Ident { - value: "example".to_string(), - quote_style: None, - span: Span::empty(), - }]) - ); - assert_eq!( - columns, - vec![ColumnDef { - name: Ident::new("var_binary_col"), - data_type: Varbinary(Some(BinaryLength::IntegerLength { length: 50 })), - - options: vec![] - },], - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_mssql_table_identifier_with_default_schema() { - ms().verified_stmt("SELECT * FROM mydatabase..MyTable"); -} - fn ms() -> TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {})]) } - -// MS SQL dialect with support for optional semi-colon statement delimiters -fn tsql() -> TestedDialects { - TestedDialects::new_with_options( - vec![Box::new(MsSqlDialect {})], - ParserOptions { - trailing_commas: false, - unescape: true, - require_semicolon_stmt_delimiter: false, - }, - ) -} - fn ms_and_generic() -> TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})]) } - -#[test] -fn parse_mssql_merge_with_output() { - let stmt = "MERGE dso.products AS t \ - USING dsi.products AS \ - s ON s.ProductID = t.ProductID \ - WHEN MATCHED AND \ - NOT (t.ProductName = s.ProductName OR (ISNULL(t.ProductName, s.ProductName) IS NULL)) \ - THEN UPDATE SET t.ProductName = s.ProductName \ - WHEN NOT MATCHED BY TARGET \ - THEN INSERT (ProductID, ProductName) \ - VALUES (s.ProductID, s.ProductName) \ - WHEN NOT MATCHED BY SOURCE THEN DELETE \ - OUTPUT $action, deleted.ProductID INTO dsi.temp_products"; - ms_and_generic().verified_stmt(stmt); -} - -#[test] -fn parse_create_trigger() { - let create_trigger = "\ - CREATE OR ALTER TRIGGER reminder1 \ - ON Sales.Customer \ - AFTER INSERT, UPDATE \ - AS RAISERROR('Notify Customer Relations', 16, 10);\ - "; - let create_stmt = ms().verified_stmt(create_trigger); - assert_eq!( - create_stmt, - Statement::CreateTrigger { - or_alter: true, - or_replace: false, - is_constraint: false, - name: ObjectName::from(vec![Ident::new("reminder1")]), - period: TriggerPeriod::After, - events: vec![TriggerEvent::Insert, TriggerEvent::Update(vec![]),], - table_name: ObjectName::from(vec![Ident::new("Sales"), Ident::new("Customer")]), - referenced_table_name: None, - referencing: vec![], - trigger_object: TriggerObject::Statement, - include_each: false, - condition: None, - exec_body: None, - statements: Some(ConditionalStatements::Sequence { - statements: vec![Statement::RaisError { - message: Box::new(Expr::Value( - (Value::SingleQuotedString("Notify Customer Relations".to_string())) - .with_empty_span() - )), - severity: Box::new(Expr::Value( - (Value::Number("16".parse().unwrap(), false)).with_empty_span() - )), - state: Box::new(Expr::Value( - (Value::Number("10".parse().unwrap(), false)).with_empty_span() - )), - arguments: vec![], - options: vec![], - }], - }), - characteristics: None, - } - ); - - let multi_statement_as_trigger = "\ - CREATE TRIGGER some_trigger ON some_table FOR INSERT \ - AS \ - DECLARE @var INT; \ - RAISERROR('Trigger fired', 10, 1);\ - "; - let _ = ms().verified_stmt(multi_statement_as_trigger); - - let multi_statement_trigger = "\ - CREATE TRIGGER some_trigger ON some_table FOR INSERT \ - AS \ - BEGIN \ - DECLARE @var INT; \ - RAISERROR('Trigger fired', 10, 1); \ - END\ - "; - let _ = ms().verified_stmt(multi_statement_trigger); - - let create_trigger_with_return = "\ - CREATE TRIGGER some_trigger ON some_table FOR INSERT \ - AS \ - BEGIN \ - RETURN; \ - END\ - "; - let _ = ms().verified_stmt(create_trigger_with_return); - - 
let create_trigger_with_return = "\ - CREATE TRIGGER some_trigger ON some_table FOR INSERT \ - AS \ - BEGIN \ - RETURN; \ - END\ - "; - let _ = ms().verified_stmt(create_trigger_with_return); - - let create_trigger_with_conditional = "\ - CREATE TRIGGER some_trigger ON some_table FOR INSERT \ - AS \ - BEGIN \ - IF 1 = 2 \ - BEGIN \ - RAISERROR('Trigger fired', 10, 1); \ - END; \ - RETURN; \ - END\ - "; - let _ = ms().verified_stmt(create_trigger_with_conditional); -} - -#[test] -fn parse_drop_trigger() { - let sql_drop_trigger = "DROP TRIGGER emp_stamp;"; - let drop_stmt = ms().one_statement_parses_to(sql_drop_trigger, ""); - assert_eq!( - drop_stmt, - Statement::DropTrigger { - if_exists: false, - trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), - table_name: None, - option: None, - } - ); -} - -#[test] -fn parse_print() { - let print_string_literal = "PRINT 'Hello, world!'"; - let print_stmt = ms().verified_stmt(print_string_literal); - assert_eq!( - print_stmt, - Statement::Print(PrintStatement { - message: Box::new(Expr::Value( - (Value::SingleQuotedString("Hello, world!".to_string())).with_empty_span() - )), - }) - ); - - let _ = ms().verified_stmt("PRINT N'Hello, ⛄️!'"); - let _ = ms().verified_stmt("PRINT @my_variable"); -} - -#[test] -fn parse_mssql_grant() { - ms().verified_stmt("GRANT SELECT ON my_table TO public, db_admin"); -} - -#[test] -fn parse_mssql_deny() { - ms().verified_stmt("DENY SELECT ON my_table TO public, db_admin"); -} - -#[test] -fn test_tsql_no_semicolon_delimiter() { - let sql = r#" -DECLARE @X AS NVARCHAR(MAX)='x' -DECLARE @Y AS NVARCHAR(MAX)='y' - "#; - - let stmts = tsql().parse_sql_statements(sql).unwrap(); - assert_eq!(stmts.len(), 2); - assert!(stmts.iter().all(|s| matches!(s, Statement::Declare { .. }))); -} diff --git a/tests/sqlparser_mysql.rs b/tests/sqlparser_mysql.rs index 9068ed9c..47f7f5b4 100644 --- a/tests/sqlparser_mysql.rs +++ b/tests/sqlparser_mysql.rs @@ -19,28 +19,18 @@ //! Test SQL syntax specific to MySQL. The parser based on the generic dialect //! is also tested (on the inputs it can handle). 
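The MySQL tests below all go through the same round-trip helpers; a minimal sketch of that pattern, assuming verified_stmt parses the input as a single statement and asserts that the re-serialized AST matches the original text (the helper definition is taken verbatim from the removed lines that follow):

fn mysql() -> TestedDialects {
    TestedDialects::new(vec![Box::new(MySqlDialect {})])
}

#[test]
fn roundtrip_sketch() {
    // Hypothetical test name, for illustration only: parses with the MySQL
    // dialect and panics if the statement does not serialize back to the
    // exact input string.
    mysql().verified_stmt("SHOW TABLES FROM mydb");
}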
-use helpers::attached_token::AttachedToken; use matches::assert_matches; use sqlparser::ast::MysqlInsertPriority::{Delayed, HighPriority, LowPriority}; use sqlparser::ast::*; use sqlparser::dialect::{GenericDialect, MySqlDialect}; use sqlparser::parser::{ParserError, ParserOptions}; -use sqlparser::tokenizer::Span; use sqlparser::tokenizer::Token; use test_utils::*; #[macro_use] mod test_utils; -fn mysql() -> TestedDialects { - TestedDialects::new(vec![Box::new(MySqlDialect {})]) -} - -fn mysql_and_generic() -> TestedDialects { - TestedDialects::new(vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})]) -} - #[test] fn parse_identifiers() { mysql().verified_stmt("SELECT $a$, àà"); @@ -52,11 +42,11 @@ fn parse_literal_string() { let select = mysql().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( - &Expr::Value((Value::SingleQuotedString("single".to_string())).with_empty_span()), + &Expr::Value(Value::SingleQuotedString("single".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value((Value::DoubleQuotedString("double".to_string())).with_empty_span()), + &Expr::Value(Value::DoubleQuotedString("double".to_string())), expr_from_projection(&select.projection[1]) ); } @@ -149,22 +139,19 @@ fn parse_flush() { read_lock: false, export: false, tables: vec![ - ObjectName::from(vec![ + ObjectName(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') }, Ident { value: "table1".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') } ]), - ObjectName::from(vec![Ident { + ObjectName(vec![Ident { value: "table2".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]) ] } @@ -189,22 +176,19 @@ fn parse_flush() { read_lock: true, export: false, tables: vec![ - ObjectName::from(vec![ + ObjectName(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') }, Ident { value: "table1".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') } ]), - ObjectName::from(vec![Ident { + ObjectName(vec![Ident { value: "table2".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]) ] } @@ -218,22 +202,19 @@ fn parse_flush() { read_lock: false, export: true, tables: vec![ - ObjectName::from(vec![ + ObjectName(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') }, Ident { value: "table1".to_string(), - quote_style: Some('`'), - span: Span::empty(), + quote_style: Some('`') } ]), - ObjectName::from(vec![Ident { + ObjectName(vec![Ident { value: "table2".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]) ] } @@ -242,22 +223,14 @@ fn parse_flush() { #[test] fn parse_show_columns() { + let table_name = ObjectName(vec![Ident::new("mytable")]); assert_eq!( mysql_and_generic().verified_stmt("SHOW COLUMNS FROM mytable"), Statement::ShowColumns { extended: false, full: false, - show_options: ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), - }), - filter_position: None, - limit_from: None, - limit: None, - starts_with: None, - } + table_name: table_name.clone(), + filter: None, } ); assert_eq!( @@ -265,20 +238,8 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - show_options: 
ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![ - Ident::new("mydb"), - Ident::new("mytable") - ])), - }), - filter_position: None, - limit_from: None, - limit: None, - starts_with: None, - } + table_name: ObjectName(vec![Ident::new("mydb"), Ident::new("mytable")]), + filter: None, } ); assert_eq!( @@ -286,17 +247,8 @@ fn parse_show_columns() { Statement::ShowColumns { extended: true, full: false, - show_options: ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), - }), - filter_position: None, - limit_from: None, - limit: None, - starts_with: None, - } + table_name: table_name.clone(), + filter: None, } ); assert_eq!( @@ -304,17 +256,8 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: true, - show_options: ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), - }), - filter_position: None, - limit_from: None, - limit: None, - starts_with: None, - } + table_name: table_name.clone(), + filter: None, } ); assert_eq!( @@ -322,19 +265,8 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - show_options: ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), - }), - filter_position: Some(ShowStatementFilterPosition::Suffix( - ShowStatementFilter::Like("pattern".into()) - )), - limit_from: None, - limit: None, - starts_with: None, - } + table_name: table_name.clone(), + filter: Some(ShowStatementFilter::Like("pattern".into())), } ); assert_eq!( @@ -342,27 +274,18 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - show_options: ShowStatementOptions { - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), - }), - filter_position: Some(ShowStatementFilterPosition::Suffix( - ShowStatementFilter::Where(mysql_and_generic().verified_expr("1 = 2")) - )), - limit_from: None, - limit: None, - starts_with: None, - } + table_name, + filter: Some(ShowStatementFilter::Where( + mysql_and_generic().verified_expr("1 = 2") + )), } ); mysql_and_generic() .one_statement_parses_to("SHOW FIELDS FROM mytable", "SHOW COLUMNS FROM mytable"); mysql_and_generic() - .one_statement_parses_to("SHOW COLUMNS IN mytable", "SHOW COLUMNS IN mytable"); + .one_statement_parses_to("SHOW COLUMNS IN mytable", "SHOW COLUMNS FROM mytable"); mysql_and_generic() - .one_statement_parses_to("SHOW FIELDS IN mytable", "SHOW COLUMNS IN mytable"); + .one_statement_parses_to("SHOW FIELDS IN mytable", "SHOW COLUMNS FROM mytable"); mysql_and_generic().one_statement_parses_to( "SHOW COLUMNS FROM mytable FROM mydb", "SHOW COLUMNS FROM mydb.mytable", @@ -404,111 +327,63 @@ fn parse_show_tables() { assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES"), Statement::ShowTables { - terse: false, - history: false, extended: false, full: false, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: None, - filter_position: None - } + clause: None, + db_name: None, + filter: None, } ); 
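The parse_show_tables hunks swap the ShowStatementOptions-based AST (the removed lines) back to the older flat fields (the added lines); a minimal sketch of consuming that restored shape, with field names and expected values taken from the neighboring added lines:

#[test]
fn show_tables_shape_sketch() {
    // Hypothetical test name, for illustration only.
    match mysql_and_generic().verified_stmt("SHOW TABLES FROM mydb") {
        Statement::ShowTables { db_name, filter, .. } => {
            // In the restored AST the database qualifier is a plain optional
            // Ident and the LIKE/WHERE filter is a separate optional field.
            assert_eq!(db_name, Some(Ident::new("mydb")));
            assert!(filter.is_none());
        }
        _ => unreachable!(),
    }
}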
assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES FROM mydb"), Statement::ShowTables { - terse: false, - history: false, extended: false, full: false, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: Some(ShowStatementIn { - clause: ShowStatementInClause::FROM, - parent_type: None, - parent_name: Some(ObjectName::from(vec![Ident::new("mydb")])), - }), - filter_position: None - } + clause: Some(ShowClause::FROM), + db_name: Some(Ident::new("mydb")), + filter: None, } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW EXTENDED TABLES"), Statement::ShowTables { - terse: false, - history: false, extended: true, full: false, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: None, - filter_position: None - } + clause: None, + db_name: None, + filter: None, } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW FULL TABLES"), Statement::ShowTables { - terse: false, - history: false, extended: false, full: true, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: None, - filter_position: None - } + clause: None, + db_name: None, + filter: None, } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES LIKE 'pattern'"), Statement::ShowTables { - terse: false, - history: false, extended: false, full: false, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: None, - filter_position: Some(ShowStatementFilterPosition::Suffix( - ShowStatementFilter::Like("pattern".into()) - )) - } + clause: None, + db_name: None, + filter: Some(ShowStatementFilter::Like("pattern".into())), } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES WHERE 1 = 2"), Statement::ShowTables { - terse: false, - history: false, extended: false, full: false, - external: false, - show_options: ShowStatementOptions { - starts_with: None, - limit: None, - limit_from: None, - show_in: None, - filter_position: Some(ShowStatementFilterPosition::Suffix( - ShowStatementFilter::Where(mysql_and_generic().verified_expr("1 = 2")) - )) - } + clause: None, + db_name: None, + filter: Some(ShowStatementFilter::Where( + mysql_and_generic().verified_expr("1 = 2") + )), } ); mysql_and_generic().verified_stmt("SHOW TABLES IN mydb"); @@ -537,7 +412,7 @@ fn parse_show_extended_full() { #[test] fn parse_show_create() { - let obj_name = ObjectName::from(vec![Ident::new("myident")]); + let obj_name = ObjectName(vec![Ident::new("myident")]); for obj_type in &[ ShowCreateObject::Table, @@ -593,16 +468,17 @@ fn parse_use() { for object_name in &valid_object_names { // Test single identifier without quotes assert_eq!( - mysql_and_generic().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + mysql_and_generic().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for "e in "e_styles { // Test single identifier with different type of quotes assert_eq!( - mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + mysql_and_generic() + .verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -616,12 
+492,12 @@ fn parse_set_variables() { mysql_and_generic().verified_stmt("SET sql_mode = CONCAT(@@sql_mode, ',STRICT_TRANS_TABLES')"); assert_eq!( mysql_and_generic().verified_stmt("SET LOCAL autocommit = 1"), - Statement::Set(Set::SingleAssignment { - scope: Some(ContextModifier::Local), + Statement::SetVariable { + local: true, hivevar: false, - variable: ObjectName::from(vec!["autocommit".into()]), - values: vec![Expr::value(number("1"))], - }) + variables: OneOrManyWithParens::One(ObjectName(vec!["autocommit".into()])), + value: vec![Expr::Value(number("1"))], + } ); } @@ -635,6 +511,7 @@ fn parse_create_table_auto_increment() { vec![ColumnDef { name: Ident::new("bar"), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -669,20 +546,6 @@ fn table_constraint_unique_primary_ctor( characteristics: Option, unique_index_type_display: Option, ) -> TableConstraint { - let columns = columns - .into_iter() - .map(|ident| IndexColumn { - column: OrderByExpr { - expr: Expr::Identifier(ident), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }, - operator_class: None, - }) - .collect(); match unique_index_type_display { Some(index_type_display) => TableConstraint::Unique { name, @@ -692,7 +555,6 @@ fn table_constraint_unique_primary_ctor( columns, index_options, characteristics, - nulls_distinct: NullsDistinctOption::None, }, None => TableConstraint::PrimaryKey { name, @@ -708,7 +570,7 @@ fn table_constraint_unique_primary_ctor( #[test] fn parse_create_table_primary_and_unique_key() { let sqls = ["UNIQUE KEY", "PRIMARY KEY"] - .map(|key_ty| format!("CREATE TABLE foo (id INT PRIMARY KEY AUTO_INCREMENT, bar INT NOT NULL, CONSTRAINT bar_key {key_ty} (bar))")); + .map(|key_ty|format!("CREATE TABLE foo (id INT PRIMARY KEY AUTO_INCREMENT, bar INT NOT NULL, CONSTRAINT bar_key {key_ty} (bar))")); let index_type_display = [Some(KeyOrIndexDisplay::Key), None]; @@ -738,6 +600,7 @@ fn parse_create_table_primary_and_unique_key() { ColumnDef { name: Ident::new("id"), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -757,6 +620,7 @@ fn parse_create_table_primary_and_unique_key() { ColumnDef { name: Ident::new("bar"), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -774,7 +638,7 @@ fn parse_create_table_primary_and_unique_key() { #[test] fn parse_create_table_primary_and_unique_key_with_index_options() { let sqls = ["UNIQUE INDEX", "PRIMARY KEY"] - .map(|key_ty| format!("CREATE TABLE foo (bar INT, var INT, CONSTRAINT constr {key_ty} index_name (bar, var) USING HASH COMMENT 'yes, ' USING BTREE COMMENT 'MySQL allows')")); + .map(|key_ty|format!("CREATE TABLE foo (bar INT, var INT, CONSTRAINT constr {key_ty} index_name (bar, var) USING HASH COMMENT 'yes, ' USING BTREE COMMENT 'MySQL allows')")); let index_type_display = [Some(KeyOrIndexDisplay::Index), None]; @@ -808,67 +672,6 @@ fn parse_create_table_primary_and_unique_key_with_index_options() { } } -#[test] -fn parse_prefix_key_part() { - let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value( - number("10"), - )))]; - for sql in [ - "CREATE INDEX idx_index ON t(textcol(10))", - "ALTER TABLE tab ADD INDEX idx_index (textcol(10))", - "ALTER TABLE tab ADD PRIMARY KEY (textcol(10))", - "ALTER TABLE tab ADD UNIQUE KEY (textcol(10))", - "ALTER TABLE tab ADD UNIQUE KEY (textcol(10))", - "ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))", 
- "CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))", - ] { - match index_column(mysql_and_generic().verified_stmt(sql)) { - Expr::Function(Function { - name, - args: FunctionArguments::List(FunctionArgumentList { args, .. }), - .. - }) => { - assert_eq!(name.to_string(), "textcol"); - assert_eq!(args, expected); - } - expr => panic!("unexpected expression {expr} for {sql}"), - } - } -} - -#[test] -fn test_functional_key_part() { - assert_eq!( - index_column( - mysql_and_generic() - .verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)") - ), - Expr::Nested(Box::new(Expr::Collate { - expr: Box::new(Expr::Identifier("col".into())), - collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier( - Ident::new("utf8mb4_bin") - )]), - })) - ); - assert_eq!( - index_column(mysql_and_generic().verified_stmt( - r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"# - )), - Expr::Nested(Box::new(Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("col"))), - op: BinaryOperator::LongArrow, - right: Box::new(Expr::Value( - Value::SingleQuotedString("$.id".to_string()).with_empty_span() - )), - }), - data_type: DataType::Unsigned, - format: None, - })), - ); -} - #[test] fn parse_create_table_primary_and_unique_key_with_index_type() { let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| { @@ -909,7 +712,7 @@ fn parse_create_table_primary_and_unique_key_with_index_type() { #[test] fn parse_create_table_primary_and_unique_key_characteristic_test() { let sqls = ["UNIQUE INDEX", "PRIMARY KEY"] - .map(|key_ty| format!("CREATE TABLE x (y INT, CONSTRAINT constr {key_ty} (y) NOT DEFERRABLE INITIALLY IMMEDIATE)")); + .map(|key_ty|format!("CREATE TABLE x (y INT, CONSTRAINT constr {key_ty} (y) NOT DEFERRABLE INITIALLY IMMEDIATE)")); for sql in &sqls { mysql_and_generic().verified_stmt(sql); } @@ -922,23 +725,9 @@ fn parse_create_table_comment() { for sql in [without_equal, with_equal] { match mysql().verified_stmt(sql) { - Statement::CreateTable(CreateTable { - name, - table_options, - .. - }) => { + Statement::CreateTable(CreateTable { name, comment, .. }) => { assert_eq!(name.to_string(), "foo"); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - let comment = match plain_options.first().unwrap() { - SqlOption::Comment(CommentDef::WithEq(c)) - | SqlOption::Comment(CommentDef::WithoutEq(c)) => c, - _ => unreachable!(), - }; - assert_eq!(comment, "baz"); + assert_eq!(comment.expect("Should exist").to_string(), "baz"); } _ => unreachable!(), } @@ -947,226 +736,29 @@ fn parse_create_table_comment() { #[test] fn parse_create_table_auto_increment_offset() { - let sql = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123"; + let canonical = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT 123"; + let with_equal = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=123"; - match mysql().verified_stmt(sql) { - Statement::CreateTable(CreateTable { - name, - table_options, - .. 
- }) => { - assert_eq!(name.to_string(), "foo"); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("AUTO_INCREMENT"), - value: Expr::Value(test_utils::number("123").with_empty_span()) - })); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_table_multiple_options_order_independent() { - let sql1 = "CREATE TABLE mytable (id INT) ENGINE=InnoDB ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc'"; - let sql2 = "CREATE TABLE mytable (id INT) KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB ROW_FORMAT=DYNAMIC"; - let sql3 = "CREATE TABLE mytable (id INT) ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB"; - - for sql in [sql1, sql2, sql3] { - match mysql().parse_sql_statements(sql).unwrap().pop().unwrap() { + for sql in [canonical, with_equal] { + match mysql().one_statement_parses_to(sql, canonical) { Statement::CreateTable(CreateTable { name, - table_options, + auto_increment_offset, .. }) => { - assert_eq!(name.to_string(), "mytable"); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("ENGINE"), - name: Some(Ident::new("InnoDB")), - values: vec![] - } - ))); - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("KEY_BLOCK_SIZE"), - value: Expr::Value(test_utils::number("8").with_empty_span()) - })); - - assert!(plain_options - .contains(&SqlOption::Comment(CommentDef::WithEq("abc".to_owned())))); - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("ROW_FORMAT"), - value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) - })); + assert_eq!(name.to_string(), "foo"); + assert_eq!( + auto_increment_offset.expect("Should exist").to_string(), + "123" + ); } _ => unreachable!(), } } } -#[test] -fn parse_create_table_with_all_table_options() { - let sql = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123 DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci INSERT_METHOD = FIRST KEY_BLOCK_SIZE = 8 ROW_FORMAT = DYNAMIC DATA DIRECTORY = '/var/lib/mysql/data' INDEX DIRECTORY = '/var/lib/mysql/index' PACK_KEYS = 1 STATS_AUTO_RECALC = 1 STATS_PERSISTENT = 0 STATS_SAMPLE_PAGES = 128 DELAY_KEY_WRITE = 1 COMPRESSION = 'ZLIB' ENCRYPTION = 'Y' MAX_ROWS = 10000 MIN_ROWS = 10 AUTOEXTEND_SIZE = 64 AVG_ROW_LENGTH = 128 CHECKSUM = 1 CONNECTION = 'mysql://localhost' ENGINE_ATTRIBUTE = 'primary' PASSWORD = 'secure_password' SECONDARY_ENGINE_ATTRIBUTE = 'secondary_attr' START TRANSACTION TABLESPACE my_tablespace STORAGE DISK UNION = (table1, table2, table3)"; - - match mysql().verified_stmt(sql) { - Statement::CreateTable(CreateTable { - name, - table_options, - .. 
- }) => { - assert_eq!(name, vec![Ident::new("foo".to_owned())].into()); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("ENGINE"), - name: Some(Ident::new("InnoDB")), - values: vec![] - } - ))); - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("COLLATE"), - value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("DEFAULT CHARSET"), - value: Expr::Identifier(Ident::new("utf8mb4".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("AUTO_INCREMENT"), - value: Expr::value(test_utils::number("123")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("KEY_BLOCK_SIZE"), - value: Expr::value(test_utils::number("8")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("ROW_FORMAT"), - value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("PACK_KEYS"), - value: Expr::value(test_utils::number("1")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("STATS_AUTO_RECALC"), - value: Expr::value(test_utils::number("1")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("STATS_PERSISTENT"), - value: Expr::value(test_utils::number("0")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("STATS_SAMPLE_PAGES"), - value: Expr::value(test_utils::number("128")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("STATS_SAMPLE_PAGES"), - value: Expr::value(test_utils::number("128")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("INSERT_METHOD"), - value: Expr::Identifier(Ident::new("FIRST".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("COMPRESSION"), - value: Expr::value(Value::SingleQuotedString("ZLIB".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("ENCRYPTION"), - value: Expr::value(Value::SingleQuotedString("Y".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("MAX_ROWS"), - value: Expr::value(test_utils::number("10000")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("MIN_ROWS"), - value: Expr::value(test_utils::number("10")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("AUTOEXTEND_SIZE"), - value: Expr::value(test_utils::number("64")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("AVG_ROW_LENGTH"), - value: Expr::value(test_utils::number("128")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("CHECKSUM"), - value: Expr::value(test_utils::number("1")) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("CONNECTION"), - value: Expr::value(Value::SingleQuotedString("mysql://localhost".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("ENGINE_ATTRIBUTE"), - value: Expr::value(Value::SingleQuotedString("primary".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("PASSWORD"), - value: 
Expr::value(Value::SingleQuotedString("secure_password".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("SECONDARY_ENGINE_ATTRIBUTE"), - value: Expr::value(Value::SingleQuotedString("secondary_attr".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::Ident(Ident::new( - "START TRANSACTION".to_owned() - )))); - assert!( - plain_options.contains(&SqlOption::TableSpace(TablespaceOption { - name: "my_tablespace".to_string(), - storage: Some(StorageType::Disk), - })) - ); - - assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("UNION"), - name: None, - values: vec![ - Ident::new("table1".to_string()), - Ident::new("table2".to_string()), - Ident::new("table3".to_string()) - ] - } - ))); - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("DATA DIRECTORY"), - value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/data".to_owned())) - })); - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("INDEX DIRECTORY"), - value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/index".to_owned())) - })); - } - _ => unreachable!(), - } -} - #[test] fn parse_create_table_set_enum() { let sql = "CREATE TABLE foo (bar SET('a', 'b'), baz ENUM('a', 'b'))"; @@ -1178,17 +770,13 @@ fn parse_create_table_set_enum() { ColumnDef { name: Ident::new("bar"), data_type: DataType::Set(vec!["a".to_string(), "b".to_string()]), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("baz"), - data_type: DataType::Enum( - vec![ - EnumMember::Name("a".to_string()), - EnumMember::Name("b".to_string()) - ], - None - ), + data_type: DataType::Enum(vec!["a".to_string(), "b".to_string()]), + collation: None, options: vec![], } ], @@ -1201,12 +789,13 @@ fn parse_create_table_set_enum() { #[test] fn parse_create_table_engine_default_charset() { - let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3"; + let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3"; match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, - table_options, + engine, + default_charset, .. }) => { assert_eq!(name.to_string(), "foo"); @@ -1214,28 +803,19 @@ fn parse_create_table_engine_default_charset() { vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Int(Some(11)), + collation: None, options: vec![], },], columns ); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("DEFAULT CHARSET"), - value: Expr::Identifier(Ident::new("utf8mb3".to_owned())) - })); - - assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( - NamedParenthesizedList { - key: Ident::new("ENGINE"), - name: Some(Ident::new("InnoDB")), - values: vec![] - } - ))); + assert_eq!( + engine, + Some(TableEngine { + name: "InnoDB".to_string(), + parameters: None + }) + ); + assert_eq!(default_charset, Some("utf8mb3".to_string())); } _ => unreachable!(), } @@ -1243,12 +823,12 @@ fn parse_create_table_engine_default_charset() { #[test] fn parse_create_table_collate() { - let sql = "CREATE TABLE foo (id INT(11)) COLLATE = utf8mb4_0900_ai_ci"; + let sql = "CREATE TABLE foo (id INT(11)) COLLATE=utf8mb4_0900_ai_ci"; match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, - table_options, + collation, .. 
}) => { assert_eq!(name.to_string(), "foo"); @@ -1256,20 +836,12 @@ fn parse_create_table_collate() { vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Int(Some(11)), + collation: None, options: vec![], },], columns ); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("COLLATE"), - value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) - })); + assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string())); } _ => unreachable!(), } @@ -1277,38 +849,25 @@ fn parse_create_table_collate() { #[test] fn parse_create_table_both_options_and_as_query() { - let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3 COLLATE = utf8mb4_0900_ai_ci AS SELECT 1"; + let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb4_0900_ai_ci AS SELECT 1"; match mysql_and_generic().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, + collation, query, - table_options, .. }) => { assert_eq!(name.to_string(), "foo"); - - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - - assert!(plain_options.contains(&SqlOption::KeyValue { - key: Ident::new("COLLATE"), - value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) - })); - + assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string())); assert_eq!( query.unwrap().body.as_select().unwrap().projection, - vec![SelectItem::UnnamedExpr(Expr::Value( - (number("1")).with_empty_span() - ))] + vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))] ); } _ => unreachable!(), } - let sql = - r"CREATE TABLE foo (id INT(11)) ENGINE = InnoDB AS SELECT 1 DEFAULT CHARSET = utf8mb3"; + let sql = r"CREATE TABLE foo (id INT(11)) ENGINE=InnoDB AS SELECT 1 DEFAULT CHARSET=utf8mb3"; assert!(matches!( mysql_and_generic().parse_sql_statements(sql), Err(ParserError::ParserError(_)) @@ -1325,10 +884,11 @@ fn parse_create_table_comment_character_set() { vec![ColumnDef { name: Ident::new("s"), data_type: DataType::Text, + collation: None, options: vec![ ColumnOptionDef { name: None, - option: ColumnOption::CharacterSet(ObjectName::from(vec![Ident::new( + option: ColumnOption::CharacterSet(ObjectName(vec![Ident::new( "utf8mb4" )])) }, @@ -1371,6 +931,7 @@ fn parse_quote_identifiers() { vec![ColumnDef { name: Ident::with_quote('`', "BEGIN"), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Unique { @@ -1394,16 +955,13 @@ fn parse_escaped_quote_identifiers_with_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "quoted ` identifier".into(), quote_style: Some('`'), - span: Span::empty(), }))], - exclude: None, into: None, from: vec![], lateral_views: vec![], @@ -1419,16 +977,16 @@ fn parse_escaped_quote_identifiers_with_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })) ); } @@ -1442,23 +1000,19 @@ fn 
parse_escaped_quote_identifiers_with_no_escape() { ParserOptions { trailing_commas: false, unescape: false, - require_semicolon_stmt_delimiter: true, } ) .verified_stmt(sql), Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "quoted `` identifier".into(), quote_style: Some('`'), - span: Span::empty(), }))], - exclude: None, into: None, from: vec![], lateral_views: vec![], @@ -1474,16 +1028,16 @@ fn parse_escaped_quote_identifiers_with_no_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })) ); } @@ -1496,17 +1050,13 @@ fn parse_escaped_backticks_with_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "`quoted identifier`".into(), quote_style: Some('`'), - span: Span::empty(), }))], - exclude: None, into: None, from: vec![], lateral_views: vec![], @@ -1522,16 +1072,16 @@ fn parse_escaped_backticks_with_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })) ); } @@ -1548,17 +1098,13 @@ fn parse_escaped_backticks_with_no_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "``quoted identifier``".into(), quote_style: Some('`'), - span: Span::empty(), }))], - exclude: None, into: None, from: vec![], lateral_views: vec![], @@ -1574,16 +1120,16 @@ fn parse_escaped_backticks_with_no_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })) ); } @@ -1634,26 +1180,31 @@ fn parse_create_table_with_minimum_display_width() { ColumnDef { name: Ident::new("bar_tinyint"), data_type: DataType::TinyInt(Some(3)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_smallint"), data_type: DataType::SmallInt(Some(5)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_mediumint"), data_type: DataType::MediumInt(Some(6)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_int"), data_type: DataType::Int(Some(11)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_bigint"), data_type: DataType::BigInt(Some(20)), + collation: None, options: vec![], } ], @@ -1674,27 +1225,32 @@ fn parse_create_table_unsigned() { vec![ ColumnDef { name: Ident::new("bar_tinyint"), - data_type: 
DataType::TinyIntUnsigned(Some(3)), + data_type: DataType::UnsignedTinyInt(Some(3)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_smallint"), - data_type: DataType::SmallIntUnsigned(Some(5)), + data_type: DataType::UnsignedSmallInt(Some(5)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_mediumint"), - data_type: DataType::MediumIntUnsigned(Some(13)), + data_type: DataType::UnsignedMediumInt(Some(13)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_int"), - data_type: DataType::IntUnsigned(Some(11)), + data_type: DataType::UnsignedInt(Some(11)), + collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_bigint"), - data_type: DataType::BigIntUnsigned(Some(20)), + data_type: DataType::UnsignedBigInt(Some(20)), + collation: None, options: vec![], }, ], @@ -1711,16 +1267,13 @@ fn parse_simple_insert() { match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, .. }) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!( @@ -1730,36 +1283,30 @@ fn parse_simple_insert() { explicit_row: false, rows: vec![ vec![ - Expr::Value( - (Value::SingleQuotedString("Test Some Inserts".to_string())) - .with_empty_span() - ), - Expr::value(number("1")) + Expr::Value(Value::SingleQuotedString( + "Test Some Inserts".to_string() + )), + Expr::Value(number("1")) ], vec![ - Expr::Value( - (Value::SingleQuotedString("Test Entry 2".to_string())) - .with_empty_span() - ), - Expr::value(number("2")) + Expr::Value(Value::SingleQuotedString("Test Entry 2".to_string())), + Expr::Value(number("2")) ], vec![ - Expr::Value( - (Value::SingleQuotedString("Test Entry 3".to_string())) - .with_empty_span() - ), - Expr::value(number("3")) + Expr::Value(Value::SingleQuotedString("Test Entry 3".to_string())), + Expr::Value(number("3")) ] ] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -1774,17 +1321,14 @@ fn parse_ignore_insert() { match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, ignore, .. 
}) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert!(ignore); @@ -1794,21 +1338,19 @@ fn parse_ignore_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value( - (Value::SingleQuotedString("Test Some Inserts".to_string())) - .with_empty_span() - ), - Expr::value(number("1")) + Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), + Expr::Value(number("1")) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -1823,17 +1365,14 @@ fn parse_priority_insert() { match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, priority, .. }) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!(priority, Some(HighPriority)); @@ -1843,21 +1382,19 @@ fn parse_priority_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value( - (Value::SingleQuotedString("Test Some Inserts".to_string())) - .with_empty_span() - ), - Expr::value(number("1")) + Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), + Expr::Value(number("1")) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -1869,17 +1406,14 @@ fn parse_priority_insert() { match mysql().verified_stmt(sql2) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, priority, .. }) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!(priority, Some(LowPriority)); @@ -1889,21 +1423,19 @@ fn parse_priority_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value( - (Value::SingleQuotedString("Test Some Inserts".to_string())) - .with_empty_span() - ), - Expr::value(number("1")) + Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), + Expr::Value(number("1")) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -1917,21 +1449,21 @@ fn parse_insert_as() { let sql = r"INSERT INTO `table` (`date`) VALUES ('2024-01-01') AS `alias`"; match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, insert_alias, .. 
}) => { assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::with_quote('`', "table")])), + ObjectName(vec![Ident::with_quote('`', "table")]), table_name ); assert_eq!(vec![Ident::with_quote('`', "date")], columns); let insert_alias = insert_alias.unwrap(); assert_eq!( - ObjectName::from(vec![Ident::with_quote('`', "alias")]), + ObjectName(vec![Ident::with_quote('`', "alias")]), insert_alias.row_alias ); assert_eq!(Some(vec![]), insert_alias.col_aliases); @@ -1940,18 +1472,19 @@ fn parse_insert_as() { with: None, body: Box::new(SetExpr::Values(Values { explicit_row: false, - rows: vec![vec![Expr::Value( - (Value::SingleQuotedString("2024-01-01".to_string())).with_empty_span() - )]] + rows: vec![vec![Expr::Value(Value::SingleQuotedString( + "2024-01-01".to_string() + ))]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -1968,14 +1501,14 @@ fn parse_insert_as() { let sql = r"INSERT INTO `table` (`id`, `date`) VALUES (1, '2024-01-01') AS `alias` (`mek_id`, `mek_date`)"; match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, insert_alias, .. }) => { assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::with_quote('`', "table")])), + ObjectName(vec![Ident::with_quote('`', "table")]), table_name ); assert_eq!( @@ -1984,7 +1517,7 @@ fn parse_insert_as() { ); let insert_alias = insert_alias.unwrap(); assert_eq!( - ObjectName::from(vec![Ident::with_quote('`', "alias")]), + ObjectName(vec![Ident::with_quote('`', "alias")]), insert_alias.row_alias ); assert_eq!( @@ -2000,21 +1533,19 @@ fn parse_insert_as() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::value(number("1")), - Expr::Value( - (Value::SingleQuotedString("2024-01-01".to_string())) - .with_empty_span() - ) + Expr::Value(number("1")), + Expr::Value(Value::SingleQuotedString("2024-01-01".to_string())) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -2028,7 +1559,7 @@ fn parse_replace_insert() { let sql = r"REPLACE DELAYED INTO tasks (title, priority) VALUES ('Test Some Inserts', 1)"; match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, @@ -2036,10 +1567,7 @@ fn parse_replace_insert() { priority, .. 
}) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert!(replace_into); @@ -2050,21 +1578,19 @@ fn parse_replace_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value( - (Value::SingleQuotedString("Test Some Inserts".to_string())) - .with_empty_span() - ), - Expr::value(number("1")) + Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), + Expr::Value(number("1")) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -2079,16 +1605,13 @@ fn parse_empty_row_insert() { match mysql().one_statement_parses_to(sql, "INSERT INTO tb VALUES (), ()") { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, .. }) => { - assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("tb")])), - table_name - ); + assert_eq!(ObjectName(vec![Ident::new("tb")]), table_name); assert!(columns.is_empty()); assert!(on.is_none()); assert_eq!( @@ -2099,13 +1622,14 @@ fn parse_empty_row_insert() { rows: vec![vec![], vec![]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); @@ -2120,14 +1644,14 @@ fn parse_insert_with_on_duplicate_update() { match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, source, on, .. 
}) => { assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("permission_groups")])), + ObjectName(vec![Ident::new("permission_groups")]), table_name ); assert_eq!( @@ -2147,61 +1671,58 @@ fn parse_insert_with_on_duplicate_update() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value( - (Value::SingleQuotedString("accounting_manager".to_string())) - .with_empty_span() - ), - Expr::Value( - (Value::SingleQuotedString( - "Some description about the group".to_string() - )) - .with_empty_span() - ), - Expr::Value((Value::Boolean(true)).with_empty_span()), - Expr::Value((Value::Boolean(true)).with_empty_span()), - Expr::Value((Value::Boolean(true)).with_empty_span()), - Expr::Value((Value::Boolean(true)).with_empty_span()), + Expr::Value(Value::SingleQuotedString( + "accounting_manager".to_string() + )), + Expr::Value(Value::SingleQuotedString( + "Some description about the group".to_string() + )), + Expr::Value(Value::Boolean(true)), + Expr::Value(Value::Boolean(true)), + Expr::Value(Value::Boolean(true)), + Expr::Value(Value::Boolean(true)), ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), source ); assert_eq!( Some(OnInsert::DuplicateKeyUpdate(vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( "description".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("description"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( "perm_create".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_create"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( "perm_read".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_read"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( "perm_update".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_update"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( "perm_delete".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_delete"))]), @@ -2222,20 +1743,23 @@ fn parse_select_with_numeric_prefix_column_name() { assert_eq!( q.body, Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), - distinct: None, top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new( "123col_$@123abc" )))], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::with_quote( - '"', "table" - )])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::with_quote('"', "table")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![] }], lateral_views: vec![], @@ -2251,7 +1775,6 @@ fn parse_select_with_numeric_prefix_column_name() { window_before_qualify: false, value_table_mode: None, connect_by: None, - 
flavor: SelectFlavor::Standard, }))) ); } @@ -2259,128 +1782,6 @@ fn parse_select_with_numeric_prefix_column_name() { } } -#[test] -fn parse_qualified_identifiers_with_numeric_prefix() { - // Case 1: Qualified column name that starts with digits. - match mysql().verified_stmt("SELECT t.15to29 FROM my_table AS t") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { - assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 2: Qualified column name that starts with digits and on its own represents a number. - match mysql().verified_stmt("SELECT t.15e29 FROM my_table AS t") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { - assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 3: Unqualified, the same token is parsed as a number. - match mysql() - .parse_sql_statements("SELECT 15e29 FROM my_table") - .unwrap() - .pop() - { - Some(Statement::Query(q)) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => { - assert_eq!(&number("15e29"), value); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 4: Quoted simple identifier. - match mysql().verified_stmt("SELECT `15e29` FROM my_table") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => { - assert_eq!(&Ident::with_quote('`', "15e29"), name); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 5: Quoted compound identifier. - match mysql().verified_stmt("SELECT t.`15e29` FROM my_table AS t") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { - assert_eq!( - &[Ident::new("t"), Ident::with_quote('`', "15e29")], - &parts[..] - ); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 6: Multi-level compound identifiers. - match mysql().verified_stmt("SELECT 1db.1table.1column") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { - assert_eq!( - &[ - Ident::new("1db"), - Ident::new("1table"), - Ident::new("1column") - ], - &parts[..] - ); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } - - // Case 7: Multi-level compound quoted identifiers. 
- match mysql().verified_stmt("SELECT `1`.`2`.`3`") { - Statement::Query(q) => match *q.body { - SetExpr::Select(s) => match s.projection.last() { - Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { - assert_eq!( - &[ - Ident::with_quote('`', "1"), - Ident::with_quote('`', "2"), - Ident::with_quote('`', "3") - ], - &parts[..] - ); - } - proj => panic!("Unexpected projection: {proj:?}"), - }, - body => panic!("Unexpected statement body: {body:?}"), - }, - stmt => panic!("Unexpected statement: {stmt:?}"), - } -} - // Don't run with bigdecimal as it fails like this on rust beta: // // 'parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column' @@ -2397,20 +1798,24 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() { assert_eq!( q.body, Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![ - SelectItem::UnnamedExpr(Expr::value(number("123e4"))), + SelectItem::UnnamedExpr(Expr::Value(number("123e4"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("123col_$@123abc"))) ], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::with_quote( - '"', "table" - )])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::with_quote('"', "table")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![] }], lateral_views: vec![], @@ -2426,7 +1831,6 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))) ); } @@ -2439,12 +1843,12 @@ fn parse_insert_with_numeric_prefix_column_name() { let sql = "INSERT INTO s1.t1 (123col_$@length123) VALUES (67.654)"; match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table: table_name, + table_name, columns, .. 
}) => { assert_eq!( - TableObject::TableName(ObjectName::from(vec![Ident::new("s1"), Ident::new("t1")])), + ObjectName(vec![Ident::new("s1"), Ident::new("t1")]), table_name ); assert_eq!(vec![Ident::new("123col_$@length123")], columns); @@ -2463,12 +1867,11 @@ fn parse_update_with_joins() { from: _from, selection, returning, - or: None, } => { assert_eq!( TableWithJoins { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("orders")]), + name: ObjectName(vec![Ident::new("orders")]), alias: Some(TableAlias { name: Ident::new("o"), columns: vec![] @@ -2478,13 +1881,10 @@ fn parse_update_with_joins() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, joins: vec![Join { relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers")]), + name: ObjectName(vec![Ident::new("customers")]), alias: Some(TableAlias { name: Ident::new("c"), columns: vec![] @@ -2494,12 +1894,9 @@ fn parse_update_with_joins() { version: None, partitions: vec![], with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { + join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::CompoundIdentifier(vec![ Ident::new("o"), Ident::new("customer_id") @@ -2516,11 +1913,11 @@ fn parse_update_with_joins() { ); assert_eq!( vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + target: AssignmentTarget::ColumnName(ObjectName(vec![ Ident::new("o"), Ident::new("completed") ])), - value: Expr::Value((Value::Boolean(true)).with_empty_span()) + value: Expr::Value(Value::Boolean(true)) }], assignments ); @@ -2531,9 +1928,7 @@ fn parse_update_with_joins() { Ident::new("firstname") ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("Peter".to_string())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::SingleQuotedString("Peter".to_string()))) }), selection ); @@ -2552,13 +1947,10 @@ fn parse_delete_with_order_by() { vec![OrderByExpr { expr: Expr::Identifier(Ident { value: "id".to_owned(), - quote_style: None, - span: Span::empty(), + quote_style: None }), - options: OrderByOptions { - asc: Some(false), - nulls_first: None, - }, + asc: Some(false), + nulls_first: None, with_fill: None, }], order_by @@ -2573,7 +1965,7 @@ fn parse_delete_with_limit() { let sql = "DELETE FROM customers LIMIT 100"; match mysql().verified_stmt(sql) { Statement::Delete(Delete { limit, .. }) => { - assert_eq!(Some(Expr::value(number("100"))), limit); + assert_eq!(Some(Expr::Value(number("100"))), limit); } _ => unreachable!(), } @@ -2587,13 +1979,11 @@ fn parse_alter_table_add_column() { if_exists, only, operations, - iceberg, location: _, on_cluster: _, } => { assert_eq!(name.to_string(), "tab"); assert!(!if_exists); - assert!(!iceberg); assert!(!only); assert_eq!( operations, @@ -2603,6 +1993,7 @@ fn parse_alter_table_add_column() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), + collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::First), @@ -2618,7 +2009,8 @@ fn parse_alter_table_add_column() { if_exists, only, operations, - .. 
+ location: _, + on_cluster: _, } => { assert_eq!(name.to_string(), "tab"); assert!(!if_exists); @@ -2631,12 +2023,12 @@ fn parse_alter_table_add_column() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), + collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), - quote_style: None, - span: Span::empty(), + quote_style: None })), },] ); @@ -2655,7 +2047,8 @@ fn parse_alter_table_add_columns() { if_exists, only, operations, - .. + location: _, + on_cluster: _, } => { assert_eq!(name.to_string(), "tab"); assert!(!if_exists); @@ -2669,6 +2062,7 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "a".into(), data_type: DataType::Text, + collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::First), @@ -2679,12 +2073,12 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), + collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, - span: Span::empty(), })), }, ] @@ -2702,19 +2096,9 @@ fn parse_alter_table_drop_primary_key() { ); } -#[test] -fn parse_alter_table_drop_foreign_key() { - assert_matches!( - alter_table_op( - mysql_and_generic().verified_stmt("ALTER TABLE tab DROP FOREIGN KEY foo_ibfk_1") - ), - AlterTableOperation::DropForeignKey { name } if name.value == "foo_ibfk_1" - ); -} - #[test] fn parse_alter_table_change_column() { - let expected_name = ObjectName::from(vec![Ident::new("orders")]); + let expected_name = ObjectName(vec![Ident::new("orders")]); let expected_operation = AlterTableOperation::ChangeColumn { old_name: Ident::new("description"), new_name: Ident::new("desc"), @@ -2755,7 +2139,6 @@ fn parse_alter_table_change_column() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, - span: Span::empty(), })), }; let sql4 = "ALTER TABLE orders CHANGE COLUMN description desc TEXT NOT NULL AFTER foo"; @@ -2766,7 +2149,7 @@ fn parse_alter_table_change_column() { #[test] fn parse_alter_table_change_column_with_column_position() { - let expected_name = ObjectName::from(vec![Ident::new("orders")]); + let expected_name = ObjectName(vec![Ident::new("orders")]); let expected_operation_first = AlterTableOperation::ChangeColumn { old_name: Ident::new("description"), new_name: Ident::new("desc"), @@ -2795,7 +2178,6 @@ fn parse_alter_table_change_column_with_column_position() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("total_count"), quote_style: None, - span: Span::empty(), })), }; @@ -2814,7 +2196,7 @@ fn parse_alter_table_change_column_with_column_position() { #[test] fn parse_alter_table_modify_column() { - let expected_name = ObjectName::from(vec![Ident::new("orders")]); + let expected_name = ObjectName(vec![Ident::new("orders")]); let expected_operation = AlterTableOperation::ModifyColumn { col_name: Ident::new("description"), data_type: DataType::Text, @@ -2852,7 +2234,6 @@ fn parse_alter_table_modify_column() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, - span: Span::empty(), })), }; let sql4 = "ALTER TABLE orders MODIFY COLUMN description TEXT NOT NULL AFTER foo"; @@ -2861,118 +2242,9 @@ fn parse_alter_table_modify_column() { assert_eq!(expected_operation, operation); } -#[test] -fn parse_alter_table_with_algorithm() { - let sql = "ALTER TABLE tab ALGORITHM = 
COPY"; - let expected_operation = AlterTableOperation::Algorithm { - equals: true, - algorithm: AlterTableAlgorithm::Copy, - }; - let operation = alter_table_op(mysql_and_generic().verified_stmt(sql)); - assert_eq!(expected_operation, operation); - - // Check order doesn't matter - let sql = - "ALTER TABLE users DROP COLUMN password_digest, ALGORITHM = COPY, RENAME COLUMN name TO username"; - let stmt = mysql_and_generic().verified_stmt(sql); - match stmt { - Statement::AlterTable { operations, .. } => { - assert_eq!( - operations, - vec![ - AlterTableOperation::DropColumn { - has_column_keyword: true, - column_names: vec![Ident::new("password_digest")], - if_exists: false, - drop_behavior: None, - }, - AlterTableOperation::Algorithm { - equals: true, - algorithm: AlterTableAlgorithm::Copy, - }, - AlterTableOperation::RenameColumn { - old_column_name: Ident::new("name"), - new_column_name: Ident::new("username") - }, - ] - ) - } - _ => panic!("Unexpected statement {stmt}"), - } - - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM DEFAULT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM INSTANT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM INPLACE"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM COPY"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = DEFAULT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = INSTANT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = INPLACE"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = COPY"); -} - -#[test] -fn parse_alter_table_with_lock() { - let sql = "ALTER TABLE tab LOCK = SHARED"; - let expected_operation = AlterTableOperation::Lock { - equals: true, - lock: AlterTableLock::Shared, - }; - let operation = alter_table_op(mysql_and_generic().verified_stmt(sql)); - assert_eq!(expected_operation, operation); - - let sql = - "ALTER TABLE users DROP COLUMN password_digest, LOCK = EXCLUSIVE, RENAME COLUMN name TO username"; - let stmt = mysql_and_generic().verified_stmt(sql); - match stmt { - Statement::AlterTable { operations, .. 
} => { - assert_eq!( - operations, - vec![ - AlterTableOperation::DropColumn { - has_column_keyword: true, - column_names: vec![Ident::new("password_digest")], - if_exists: false, - drop_behavior: None, - }, - AlterTableOperation::Lock { - equals: true, - lock: AlterTableLock::Exclusive, - }, - AlterTableOperation::RenameColumn { - old_column_name: Ident::new("name"), - new_column_name: Ident::new("username") - }, - ] - ) - } - _ => panic!("Unexpected statement {stmt}"), - } - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK DEFAULT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK SHARED"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK NONE"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK EXCLUSIVE"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = DEFAULT"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = SHARED"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = NONE"); - mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = EXCLUSIVE"); -} - -#[test] -fn parse_alter_table_auto_increment() { - let sql = "ALTER TABLE tab AUTO_INCREMENT = 42"; - let expected_operation = AlterTableOperation::AutoIncrement { - equals: true, - value: number("42").with_empty_span(), - }; - let operation = alter_table_op(mysql().verified_stmt(sql)); - assert_eq!(expected_operation, operation); - - mysql_and_generic().verified_stmt("ALTER TABLE `users` AUTO_INCREMENT 42"); -} - #[test] fn parse_alter_table_modify_column_with_column_position() { - let expected_name = ObjectName::from(vec![Ident::new("orders")]); + let expected_name = ObjectName(vec![Ident::new("orders")]); let expected_operation_first = AlterTableOperation::ModifyColumn { col_name: Ident::new("description"), data_type: DataType::Text, @@ -2999,7 +2271,6 @@ fn parse_alter_table_modify_column_with_column_position() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("total_count"), quote_style: None, - span: Span::empty(), })), }; @@ -3018,8 +2289,6 @@ fn parse_alter_table_modify_column_with_column_position() { #[test] fn parse_substring_in_select() { - use sqlparser::tokenizer::Span; - let sql = "SELECT DISTINCT SUBSTRING(description, 0, 1) FROM test"; match mysql().one_statement_parses_to( sql, @@ -3030,33 +2299,32 @@ fn parse_substring_in_select() { Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: Some(Distinct::Distinct), top: None, top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Substring { expr: Box::new(Expr::Identifier(Ident { value: "description".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None })), - substring_from: Some(Box::new(Expr::Value( - (number("0")).with_empty_span() - ))), - substring_for: Some(Box::new(Expr::Value( - (number("1")).with_empty_span() - ))), + substring_from: Some(Box::new(Expr::Value(number("0")))), + substring_for: Some(Box::new(Expr::Value(number("1")))), special: true, - shorthand: false, })], - exclude: None, into: None, from: vec![TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident { - value: "test".to_string(), - quote_style: None, - span: Span::empty(), - }])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident { + value: "test".to_string(), + quote_style: None + }]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![] }], 
lateral_views: vec![], @@ -3072,16 +2340,16 @@ fn parse_substring_in_select() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], }), query ); @@ -3115,17 +2383,6 @@ fn parse_rlike_and_regexp() { } } -#[test] -fn parse_like_with_escape() { - // verify backslash is not stripped for escaped wildcards - mysql().verified_only_select(r#"SELECT 'a\%c' LIKE 'a\%c'"#); - mysql().verified_only_select(r#"SELECT 'a\_c' LIKE 'a\_c'"#); - mysql().verified_only_select(r#"SELECT '%\_\%' LIKE '%\_\%'"#); - mysql().verified_only_select(r#"SELECT '\_\%' LIKE CONCAT('\_', '\%')"#); - mysql().verified_only_select(r#"SELECT 'a%c' LIKE 'a$%c' ESCAPE '$'"#); - mysql().verified_only_select(r#"SELECT 'a_c' LIKE 'a#_c' ESCAPE '#'"#); -} - #[test] fn parse_kill() { let stmt = mysql_and_generic().verified_stmt("KILL CONNECTION 5"); @@ -3157,7 +2414,7 @@ fn parse_kill() { } #[test] -fn parse_table_column_option_on_update() { +fn parse_table_colum_option_on_update() { let sql1 = "CREATE TABLE foo (`modification_time` DATETIME ON UPDATE CURRENT_TIMESTAMP())"; match mysql().verified_stmt(sql1) { Statement::CreateTable(CreateTable { name, columns, .. }) => { @@ -3166,6 +2423,7 @@ fn parse_table_column_option_on_update() { vec![ColumnDef { name: Ident::with_quote('`', "modification_time"), data_type: DataType::Datetime(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::OnUpdate(call("CURRENT_TIMESTAMP", [])), @@ -3183,19 +2441,19 @@ fn parse_set_names() { let stmt = mysql_and_generic().verified_stmt("SET NAMES utf8mb4"); assert_eq!( stmt, - Statement::Set(Set::SetNames { - charset_name: "utf8mb4".into(), + Statement::SetNames { + charset_name: "utf8mb4".to_string(), collation_name: None, - }) + } ); let stmt = mysql_and_generic().verified_stmt("SET NAMES utf8mb4 COLLATE bogus"); assert_eq!( stmt, - Statement::Set(Set::SetNames { - charset_name: "utf8mb4".into(), + Statement::SetNames { + charset_name: "utf8mb4".to_string(), collation_name: Some("bogus".to_string()), - }) + } ); let stmt = mysql_and_generic() @@ -3203,20 +2461,22 @@ fn parse_set_names() { .unwrap(); assert_eq!( stmt, - vec![Statement::Set(Set::SetNames { - charset_name: "utf8mb4".into(), + vec![Statement::SetNames { + charset_name: "utf8mb4".to_string(), collation_name: Some("bogus".to_string()), - })] + }] ); let stmt = mysql_and_generic().verified_stmt("SET NAMES DEFAULT"); - assert_eq!(stmt, Statement::Set(Set::SetNamesDefault {})); + assert_eq!(stmt, Statement::SetNamesDefault {}); } #[test] fn parse_limit_my_sql_syntax() { - mysql_and_generic().verified_stmt("SELECT id, fname, lname FROM customer LIMIT 10 OFFSET 5"); - mysql_and_generic().verified_stmt("SELECT id, fname, lname FROM customer LIMIT 5, 10"); + mysql_and_generic().one_statement_parses_to( + "SELECT id, fname, lname FROM customer LIMIT 5, 10", + "SELECT id, fname, lname FROM customer LIMIT 10 OFFSET 5", + ); mysql_and_generic().verified_stmt("SELECT * FROM user LIMIT ? 
OFFSET ?"); } @@ -3340,6 +2600,14 @@ fn parse_create_table_with_fulltext_definition_should_not_accept_constraint_name mysql_and_generic().verified_stmt("CREATE TABLE tb (c1 INT, CONSTRAINT cons FULLTEXT (c1))"); } +fn mysql() -> TestedDialects { + TestedDialects::new(vec![Box::new(MySqlDialect {})]) +} + +fn mysql_and_generic() -> TestedDialects { + TestedDialects::new(vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})]) +} + #[test] fn parse_values() { mysql().verified_stmt("VALUES ROW(1, true, 'a')"); @@ -3353,18 +2621,13 @@ fn parse_hex_string_introducer() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, - projection: vec![SelectItem::UnnamedExpr(Expr::Prefixed { - prefix: Ident::from("_latin1"), - value: Expr::Value( - Value::HexStringLiteral("4D7953514C".to_string()).with_empty_span() - ) - .into(), + projection: vec![SelectItem::UnnamedExpr(Expr::IntroducedString { + introducer: "_latin1".to_string(), + value: Value::HexStringLiteral("4D7953514C".to_string()) })], - exclude: None, from: vec![], lateral_views: vec![], prewhere: None, @@ -3380,16 +2643,16 @@ fn parse_hex_string_introducer() { value_table_mode: None, into: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })) ) } @@ -3451,27 +2714,19 @@ fn parse_convert_using() { #[test] fn parse_create_table_with_column_collate() { let sql = "CREATE TABLE tb (id TEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci)"; - match mysql().verified_stmt(sql) { + let canonical = "CREATE TABLE tb (id TEXT COLLATE utf8mb4_0900_ai_ci CHARACTER SET utf8mb4)"; + match mysql().one_statement_parses_to(sql, canonical) { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { assert_eq!(name.to_string(), "tb"); assert_eq!( vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Text, - options: vec![ - ColumnOptionDef { - name: None, - option: ColumnOption::CharacterSet(ObjectName::from(vec![Ident::new( - "utf8mb4" - )])) - }, - ColumnOptionDef { - name: None, - option: ColumnOption::Collation(ObjectName::from(vec![Ident::new( - "utf8mb4_0900_ai_ci" - )])) - } - ], + collation: Some(ObjectName(vec![Ident::new("utf8mb4_0900_ai_ci")])), + options: vec![ColumnOptionDef { + name: None, + option: ColumnOption::CharacterSet(ObjectName(vec![Ident::new("utf8mb4")])) + }], },], columns ); @@ -3532,7 +2787,7 @@ fn parse_json_table() { .from[0] .relation, TableFactor::JsonTable { - json_expr: Expr::Value((Value::SingleQuotedString("[1,2]".to_string())).with_empty_span()), + json_expr: Expr::Value(Value::SingleQuotedString("[1,2]".to_string())), json_path: Value::SingleQuotedString("$[*]".to_string()), columns: vec![ JsonTableColumn::Named(JsonTableNamedColumn { @@ -3562,584 +2817,3 @@ fn test_group_concat() { mysql_and_generic() .verified_expr("GROUP_CONCAT(DISTINCT test_score ORDER BY test_score DESC SEPARATOR ' ')"); } - -/// The XOR binary operator is only supported in MySQL -#[test] -fn parse_logical_xor() { - let sql = "SELECT true XOR true, false XOR false, true XOR false, false XOR true"; - let select = mysql_and_generic().verified_only_select(sql); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), - }), - select.projection[0] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), - }), - select.projection[1] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), - }), - select.projection[2] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), - }), - select.projection[3] - ); -} - -#[test] -fn parse_bitstring_literal() { - let select = mysql_and_generic().verified_only_select("SELECT B'111'"); - assert_eq!( - select.projection, - vec![SelectItem::UnnamedExpr(Expr::Value( - (Value::SingleQuotedByteStringLiteral("111".to_string())).with_empty_span() - ))] - ); -} - -#[test] -fn parse_grant() { - let sql = "GRANT ALL ON *.* TO 'jeffrey'@'%'"; - let stmt = mysql().verified_stmt(sql); - if let Statement::Grant { - privileges, - objects, - grantees, - with_grant_option, - as_grantor: _, - granted_by, - current_grants: _, - } = stmt - { - assert_eq!( - privileges, - Privileges::All { - with_privileges_keyword: false - } - ); - assert_eq!( - objects, - Some(GrantObjects::Tables(vec![ObjectName::from(vec![ - "*".into(), - "*".into() - ])])) - ); - assert!(!with_grant_option); - assert!(granted_by.is_none()); - if let [Grantee { - grantee_type: GranteesType::None, - name: Some(GranteeName::UserHost { user, host }), - }] = grantees.as_slice() - { - assert_eq!(user.value, "jeffrey"); - assert_eq!(user.quote_style, Some('\'')); - 
assert_eq!(host.value, "%"); - assert_eq!(host.quote_style, Some('\'')); - } else { - unreachable!() - } - } else { - unreachable!() - } -} - -#[test] -fn parse_revoke() { - let sql = "REVOKE ALL ON db1.* FROM 'jeffrey'@'%'"; - let stmt = mysql_and_generic().verified_stmt(sql); - if let Statement::Revoke { - privileges, - objects, - grantees, - granted_by, - cascade, - } = stmt - { - assert_eq!( - privileges, - Privileges::All { - with_privileges_keyword: false - } - ); - assert_eq!( - objects, - Some(GrantObjects::Tables(vec![ObjectName::from(vec![ - "db1".into(), - "*".into() - ])])) - ); - if let [Grantee { - grantee_type: GranteesType::None, - name: Some(GranteeName::UserHost { user, host }), - }] = grantees.as_slice() - { - assert_eq!(user.value, "jeffrey"); - assert_eq!(user.quote_style, Some('\'')); - assert_eq!(host.value, "%"); - assert_eq!(host.quote_style, Some('\'')); - } else { - unreachable!() - } - assert!(granted_by.is_none()); - assert!(cascade.is_none()); - } else { - unreachable!() - } -} - -#[test] -fn parse_create_view_algorithm_param() { - let sql = "CREATE ALGORITHM = MERGE VIEW foo AS SELECT 1"; - let stmt = mysql().verified_stmt(sql); - if let Statement::CreateView { - params: - Some(CreateViewParams { - algorithm, - definer, - security, - }), - .. - } = stmt - { - assert_eq!(algorithm, Some(CreateViewAlgorithm::Merge)); - assert!(definer.is_none()); - assert!(security.is_none()); - } else { - unreachable!() - } - mysql().verified_stmt("CREATE ALGORITHM = UNDEFINED VIEW foo AS SELECT 1"); - mysql().verified_stmt("CREATE ALGORITHM = TEMPTABLE VIEW foo AS SELECT 1"); -} - -#[test] -fn parse_create_view_definer_param() { - let sql = "CREATE DEFINER = 'jeffrey'@'localhost' VIEW foo AS SELECT 1"; - let stmt = mysql().verified_stmt(sql); - if let Statement::CreateView { - params: - Some(CreateViewParams { - algorithm, - definer, - security, - }), - .. - } = stmt - { - assert!(algorithm.is_none()); - if let Some(GranteeName::UserHost { user, host }) = definer { - assert_eq!(user.value, "jeffrey"); - assert_eq!(user.quote_style, Some('\'')); - assert_eq!(host.value, "localhost"); - assert_eq!(host.quote_style, Some('\'')); - } else { - unreachable!() - } - assert!(security.is_none()); - } else { - unreachable!() - } -} - -#[test] -fn parse_create_view_security_param() { - let sql = "CREATE SQL SECURITY DEFINER VIEW foo AS SELECT 1"; - let stmt = mysql().verified_stmt(sql); - if let Statement::CreateView { - params: - Some(CreateViewParams { - algorithm, - definer, - security, - }), - .. - } = stmt - { - assert!(algorithm.is_none()); - assert!(definer.is_none()); - assert_eq!(security, Some(CreateViewSecurity::Definer)); - } else { - unreachable!() - } - mysql().verified_stmt("CREATE SQL SECURITY INVOKER VIEW foo AS SELECT 1"); -} - -#[test] -fn parse_create_view_multiple_params() { - let sql = "CREATE ALGORITHM = UNDEFINED DEFINER = `root`@`%` SQL SECURITY INVOKER VIEW foo AS SELECT 1"; - let stmt = mysql().verified_stmt(sql); - if let Statement::CreateView { - params: - Some(CreateViewParams { - algorithm, - definer, - security, - }), - .. 
- } = stmt - { - assert_eq!(algorithm, Some(CreateViewAlgorithm::Undefined)); - if let Some(GranteeName::UserHost { user, host }) = definer { - assert_eq!(user.value, "root"); - assert_eq!(user.quote_style, Some('`')); - assert_eq!(host.value, "%"); - assert_eq!(host.quote_style, Some('`')); - } else { - unreachable!() - } - assert_eq!(security, Some(CreateViewSecurity::Invoker)); - } else { - unreachable!() - } -} - -#[test] -fn parse_longblob_type() { - let sql = "CREATE TABLE foo (bar LONGBLOB)"; - let stmt = mysql_and_generic().verified_stmt(sql); - if let Statement::CreateTable(CreateTable { columns, .. }) = stmt { - assert_eq!(columns.len(), 1); - assert_eq!(columns[0].data_type, DataType::LongBlob); - } else { - unreachable!() - } - mysql_and_generic().verified_stmt("CREATE TABLE foo (bar TINYBLOB)"); - mysql_and_generic().verified_stmt("CREATE TABLE foo (bar MEDIUMBLOB)"); - mysql_and_generic().verified_stmt("CREATE TABLE foo (bar TINYTEXT)"); - mysql_and_generic().verified_stmt("CREATE TABLE foo (bar MEDIUMTEXT)"); - mysql_and_generic().verified_stmt("CREATE TABLE foo (bar LONGTEXT)"); -} - -#[test] -fn parse_begin_without_transaction() { - mysql().verified_stmt("BEGIN"); -} - -#[test] -fn parse_geometric_types_srid_option() { - mysql_and_generic().verified_stmt("CREATE TABLE t (a geometry SRID 4326)"); -} - -#[test] -fn parse_double_precision() { - mysql().verified_stmt("CREATE TABLE foo (bar DOUBLE)"); - mysql().verified_stmt("CREATE TABLE foo (bar DOUBLE(11,0))"); - mysql().one_statement_parses_to( - "CREATE TABLE foo (bar DOUBLE(11, 0))", - "CREATE TABLE foo (bar DOUBLE(11,0))", - ); -} - -#[test] -fn parse_looks_like_single_line_comment() { - mysql().one_statement_parses_to( - "UPDATE account SET balance=balance--1 WHERE account_id=5752", - "UPDATE account SET balance = balance - -1 WHERE account_id = 5752", - ); - mysql().one_statement_parses_to( - r#" - UPDATE account SET balance=balance-- 1 - WHERE account_id=5752 - "#, - "UPDATE account SET balance = balance WHERE account_id = 5752", - ); -} - -#[test] -fn parse_create_trigger() { - let sql_create_trigger = r#" - CREATE TRIGGER emp_stamp BEFORE INSERT ON emp - FOR EACH ROW EXECUTE FUNCTION emp_stamp(); - "#; - let create_stmt = mysql().one_statement_parses_to(sql_create_trigger, ""); - assert_eq!( - create_stmt, - Statement::CreateTrigger { - or_alter: false, - or_replace: false, - is_constraint: false, - name: ObjectName::from(vec![Ident::new("emp_stamp")]), - period: TriggerPeriod::Before, - events: vec![TriggerEvent::Insert], - table_name: ObjectName::from(vec![Ident::new("emp")]), - referenced_table_name: None, - referencing: vec![], - trigger_object: TriggerObject::Row, - include_each: true, - condition: None, - exec_body: Some(TriggerExecBody { - exec_type: TriggerExecBodyType::Function, - func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("emp_stamp")]), - args: None, - } - }), - statements: None, - characteristics: None, - } - ); -} - -#[test] -fn parse_drop_trigger() { - let sql_drop_trigger = "DROP TRIGGER emp_stamp;"; - let drop_stmt = mysql().one_statement_parses_to(sql_drop_trigger, ""); - assert_eq!( - drop_stmt, - Statement::DropTrigger { - if_exists: false, - trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), - table_name: None, - option: None, - } - ); -} - -#[test] -fn parse_cast_integers() { - mysql().verified_expr("CAST(foo AS UNSIGNED)"); - mysql().verified_expr("CAST(foo AS SIGNED)"); - mysql().verified_expr("CAST(foo AS UNSIGNED INTEGER)"); - 
mysql().verified_expr("CAST(foo AS SIGNED INTEGER)"); - - mysql() - .run_parser_method("CAST(foo AS UNSIGNED(3))", |p| p.parse_expr()) - .expect_err("CAST doesn't allow display width"); - mysql() - .run_parser_method("CAST(foo AS UNSIGNED(3) INTEGER)", |p| p.parse_expr()) - .expect_err("CAST doesn't allow display width"); - mysql() - .run_parser_method("CAST(foo AS UNSIGNED INTEGER(3))", |p| p.parse_expr()) - .expect_err("CAST doesn't allow display width"); -} - -#[test] -fn parse_match_against_with_alias() { - let sql = "SELECT tbl.ProjectID FROM surveys.tbl1 AS tbl WHERE MATCH (tbl.ReferenceID) AGAINST ('AAA' IN BOOLEAN MODE)"; - match mysql().verified_stmt(sql) { - Statement::Query(query) => match *query.body { - SetExpr::Select(select) => match select.selection { - Some(Expr::MatchAgainst { - columns, - match_value, - opt_search_modifier, - }) => { - assert_eq!( - columns, - vec![ObjectName::from(vec![ - Ident::new("tbl"), - Ident::new("ReferenceID") - ])] - ); - assert_eq!(match_value, Value::SingleQuotedString("AAA".to_owned())); - assert_eq!(opt_search_modifier, Some(SearchModifier::InBooleanMode)); - } - _ => unreachable!(), - }, - _ => unreachable!(), - }, - _ => unreachable!(), - } -} - -#[test] -fn test_variable_assignment_using_colon_equal() { - let sql_select = "SELECT @price := price, @tax := price * 0.1 FROM products WHERE id = 1"; - let stmt = mysql().verified_stmt(sql_select); - match stmt { - Statement::Query(query) => { - let select = query.body.as_select().unwrap(); - - assert_eq!( - select.projection, - vec![ - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "@price".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Assignment, - right: Box::new(Expr::Identifier(Ident { - value: "price".to_string(), - quote_style: None, - span: Span::empty(), - })), - }), - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "@tax".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Assignment, - right: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "price".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Multiply, - right: Box::new(Expr::Value( - (test_utils::number("0.1")).with_empty_span() - )), - }), - }), - ] - ); - - assert_eq!( - select.selection, - Some(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "id".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value((test_utils::number("1")).with_empty_span())), - }) - ); - } - _ => panic!("Unexpected statement {stmt}"), - } - - let sql_update = - "UPDATE products SET price = @new_price := price * 1.1 WHERE category = 'Books'"; - let stmt = mysql().verified_stmt(sql_update); - - match stmt { - Statement::Update { assignments, .. 
} => { - assert_eq!( - assignments, - vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![ - ObjectNamePart::Identifier(Ident { - value: "price".to_string(), - quote_style: None, - span: Span::empty(), - }) - ])), - value: Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "@new_price".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Assignment, - right: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident { - value: "price".to_string(), - quote_style: None, - span: Span::empty(), - })), - op: BinaryOperator::Multiply, - right: Box::new(Expr::Value( - (test_utils::number("1.1")).with_empty_span() - )), - }), - }, - }] - ) - } - _ => panic!("Unexpected statement {stmt}"), - } -} - -#[test] -fn parse_straight_join() { - mysql().verified_stmt( - "SELECT a.*, b.* FROM table_a AS a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id", - ); - // Without table alias - mysql() - .verified_stmt("SELECT a.*, b.* FROM table_a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id"); -} - -#[test] -fn mysql_foreign_key_with_index_name() { - mysql().verified_stmt( - "CREATE TABLE orders (customer_id INT, INDEX idx_customer (customer_id), CONSTRAINT fk_customer FOREIGN KEY idx_customer (customer_id) REFERENCES customers(id))", - ); -} - -#[test] -fn parse_drop_index() { - let sql = "DROP INDEX idx_name ON table_name"; - match mysql().verified_stmt(sql) { - Statement::Drop { - object_type, - if_exists, - names, - cascade, - restrict, - purge, - temporary, - table, - } => { - assert!(!if_exists); - assert_eq!(ObjectType::Index, object_type); - assert_eq!( - vec!["idx_name"], - names.iter().map(ToString::to_string).collect::<Vec<String>>() - ); - assert!(!cascade); - assert!(!restrict); - assert!(!purge); - assert!(!temporary); - assert!(table.is_some()); - assert_eq!("table_name", table.unwrap().to_string()); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_alter_table_drop_index() { - assert_matches!( - alter_table_op( - mysql_and_generic().verified_stmt("ALTER TABLE tab DROP INDEX idx_index") - ), - AlterTableOperation::DropIndex { name } if name.value == "idx_index" - ); -} - -#[test] -fn parse_json_member_of() { - mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#); - let sql = r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#; - let stmt = mysql().verified_stmt(sql); - match stmt { - Statement::Query(query) => { - let select = query.body.as_select().unwrap(); - assert_eq!( - select.projection, - vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf { - value: Box::new(Expr::Value( - Value::SingleQuotedString("ab".to_string()).into() - )), - array: Box::new(Expr::Value( - Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string()) - .into() - )), - }))] - ); - } - _ => panic!("Unexpected statement {stmt}"), - } -} diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs index 0d1d138c..c30603ba 100644 --- a/tests/sqlparser_postgres.rs +++ b/tests/sqlparser_postgres.rs @@ -21,9 +21,6 @@ #[macro_use] mod test_utils; - -use helpers::attached_token::AttachedToken; -use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; @@ -349,7 +346,7 @@ fn parse_create_table_with_defaults() { name, columns, constraints, - table_options, + with_options, if_not_exists: false, external: false, file_format: None, @@ -364,6 +361,7 @@ ColumnDef { name: "customer_id".into(), data_type: DataType::Integer(None), + collation: None, options: 
vec![ColumnOptionDef { name: None, option: ColumnOption::Default( @@ -374,6 +372,7 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "store_id".into(), data_type: DataType::SmallInt(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -387,6 +386,7 @@ fn parse_create_table_with_defaults() { unit: None } )), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -400,18 +400,11 @@ fn parse_create_table_with_defaults() { unit: None } )), - options: vec![ - ColumnOptionDef { - name: None, - option: ColumnOption::Collation(ObjectName::from(vec![ - Ident::with_quote('"', "es_ES") - ])), - }, - ColumnOptionDef { - name: None, - option: ColumnOption::NotNull, - } - ], + collation: Some(ObjectName(vec![Ident::with_quote('"', "es_ES")])), + options: vec![ColumnOptionDef { + name: None, + option: ColumnOption::NotNull, + }], }, ColumnDef { name: "email".into(), @@ -421,11 +414,13 @@ fn parse_create_table_with_defaults() { unit: None } )), + collation: None, options: vec![], }, ColumnDef { name: "address_id".into(), data_type: DataType::SmallInt(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull @@ -434,12 +429,11 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "activebool".into(), data_type: DataType::Boolean, + collation: None, options: vec![ ColumnOptionDef { name: None, - option: ColumnOption::Default(Expr::Value( - (Value::Boolean(true)).with_empty_span() - )), + option: ColumnOption::Default(Expr::Value(Value::Boolean(true))), }, ColumnOptionDef { name: None, @@ -450,6 +444,7 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "create_date".into(), data_type: DataType::Date, + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -464,6 +459,7 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "last_update".into(), data_type: DataType::Timestamp(None, TimezoneInfo::WithoutTimeZone), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -478,6 +474,7 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "active".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull @@ -486,25 +483,20 @@ fn parse_create_table_with_defaults() { ] ); assert!(constraints.is_empty()); - - let with_options = match table_options { - CreateTableOptions::With(options) => options, - _ => unreachable!(), - }; assert_eq!( with_options, vec![ SqlOption::KeyValue { key: "fillfactor".into(), - value: Expr::value(number("20")) + value: Expr::Value(number("20")) }, SqlOption::KeyValue { key: "user_catalog_table".into(), - value: Expr::Value((Value::Boolean(true)).with_empty_span()) + value: Expr::Value(Value::Boolean(true)) }, SqlOption::KeyValue { key: "autovacuum_vacuum_threshold".into(), - value: Expr::value(number("100")) + value: Expr::Value(number("100")) }, ] ); @@ -600,26 +592,6 @@ fn parse_alter_table_constraints_rename() { } } -#[test] -fn parse_alter_table_constraints_unique_nulls_distinct() { - match pg_and_generic() - .verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)") - { - Statement::AlterTable { operations, .. } => match &operations[0] { - AlterTableOperation::AddConstraint { - constraint: TableConstraint::Unique { nulls_distinct, .. }, - .. 
- } => { - assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct) - } - _ => unreachable!(), - }, - _ => unreachable!(), - } - pg_and_generic().verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS DISTINCT (c)"); - pg_and_generic().verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE (c)"); -} - #[test] fn parse_alter_table_disable() { pg_and_generic().verified_stmt("ALTER TABLE tab DISABLE ROW LEVEL SECURITY"); @@ -669,103 +641,12 @@ fn parse_create_extension() { .verified_stmt("CREATE EXTENSION extension_name WITH SCHEMA schema_name VERSION version"); } -#[test] -fn parse_drop_extension() { - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION extension_name"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: false, - cascade_or_restrict: None, - } - ); - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION extension_name CASCADE"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: false, - cascade_or_restrict: Some(ReferentialAction::Cascade), - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION extension_name RESTRICT"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: false, - cascade_or_restrict: Some(ReferentialAction::Restrict), - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION extension_name, extension_name2 CASCADE"), - Statement::DropExtension { - names: vec!["extension_name".into(), "extension_name2".into()], - if_exists: false, - cascade_or_restrict: Some(ReferentialAction::Cascade), - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION extension_name, extension_name2 RESTRICT"), - Statement::DropExtension { - names: vec!["extension_name".into(), "extension_name2".into()], - if_exists: false, - cascade_or_restrict: Some(ReferentialAction::Restrict), - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: true, - cascade_or_restrict: None, - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name CASCADE"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: true, - cascade_or_restrict: Some(ReferentialAction::Cascade), - } - ); - - assert_eq!( - pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name RESTRICT"), - Statement::DropExtension { - names: vec!["extension_name".into()], - if_exists: true, - cascade_or_restrict: Some(ReferentialAction::Restrict), - } - ); - - assert_eq!( - pg_and_generic() - .verified_stmt("DROP EXTENSION IF EXISTS extension_name1, extension_name2 CASCADE"), - Statement::DropExtension { - names: vec!["extension_name1".into(), "extension_name2".into()], - if_exists: true, - cascade_or_restrict: Some(ReferentialAction::Cascade), - } - ); - - assert_eq!( - pg_and_generic() - .verified_stmt("DROP EXTENSION IF EXISTS extension_name1, extension_name2 RESTRICT"), - Statement::DropExtension { - names: vec!["extension_name1".into(), "extension_name2".into()], - if_exists: true, - cascade_or_restrict: Some(ReferentialAction::Restrict), - } - ); -} - #[test] fn parse_alter_table_alter_column() { - pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'"); + pg().one_statement_parses_to( + "ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'", + "ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'", + ); 
match alter_table_op( pg().verified_stmt( @@ -774,14 +655,12 @@ fn parse_alter_table_alter_column() { ) { AlterTableOperation::AlterColumn { column_name, op } => { assert_eq!("is_active", column_name.to_string()); - let using_expr = - Expr::Value(Value::SingleQuotedString("text".to_string()).with_empty_span()); + let using_expr = Expr::Value(Value::SingleQuotedString("text".to_string())); assert_eq!( op, AlterColumnOperation::SetDataType { data_type: DataType::Text, using: Some(using_expr), - had_set: true, } ); } @@ -833,7 +712,8 @@ fn parse_alter_table_add_columns() { if_exists, only, operations, - .. + location: _, + on_cluster: _, } => { assert_eq!(name.to_string(), "tab"); assert!(if_exists); @@ -847,6 +727,7 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "a".into(), data_type: DataType::Text, + collation: None, options: vec![], }, column_position: None, @@ -857,6 +738,7 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), + collation: None, options: vec![], }, column_position: None, @@ -913,7 +795,8 @@ fn parse_alter_table_owner_to() { if_exists: _, only: _, operations, - .. + location: _, + on_cluster: _, } => { assert_eq!(name.to_string(), "tab"); assert_eq!( @@ -991,7 +874,6 @@ fn parse_create_schema_if_not_exists() { Statement::CreateSchema { if_not_exists: true, schema_name, - .. } => assert_eq!("schema_name", schema_name.to_string()), _ => unreachable!(), } @@ -1043,7 +925,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: false, @@ -1061,7 +943,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: false, @@ -1079,7 +961,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: false, @@ -1103,7 +985,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: true, @@ -1121,7 +1003,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: true, @@ -1139,7 +1021,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: true, @@ -1180,7 +1062,7 @@ fn parse_copy_from() { pg_and_generic().one_statement_parses_to(sql, ""), Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["table".into()]), + table_name: ObjectName(vec!["table".into()]), columns: vec!["a".into(), "b".into()], }, to: false, @@ -1226,7 +1108,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: true, @@ -1244,7 +1126,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["country".into()]), + table_name: 
ObjectName(vec!["country".into()]), columns: vec![], }, to: true, @@ -1261,7 +1143,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["country".into()]), + table_name: ObjectName(vec!["country".into()]), columns: vec![], }, to: true, @@ -1281,31 +1163,25 @@ fn parse_copy_to() { source: CopySource::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, projection: vec![ SelectItem::ExprWithAlias { - expr: Expr::value(number("42")), + expr: Expr::Value(number("42")), alias: Ident { value: "a".into(), quote_style: None, - span: Span::empty(), }, }, SelectItem::ExprWithAlias { - expr: Expr::Value( - (Value::SingleQuotedString("hello".into())).with_empty_span() - ), + expr: Expr::Value(Value::SingleQuotedString("hello".into())), alias: Ident { value: "b".into(), quote_style: None, - span: Span::empty(), }, } ], - exclude: None, into: None, from: vec![], lateral_views: vec![], @@ -1321,16 +1197,16 @@ fn parse_copy_to() { qualify: None, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), to: true, target: CopyTarget::File { @@ -1350,7 +1226,7 @@ fn parse_copy_from_before_v9_0() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: false, @@ -1379,7 +1255,7 @@ fn parse_copy_from_before_v9_0() { pg_and_generic().one_statement_parses_to(sql, ""), Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: false, @@ -1407,7 +1283,7 @@ fn parse_copy_to_before_v9_0() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName::from(vec!["users".into()]), + table_name: ObjectName(vec!["users".into()]), columns: vec![], }, to: true, @@ -1436,77 +1312,80 @@ fn parse_set() { let stmt = pg_and_generic().verified_stmt("SET a = b"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a")]), - values: vec![Expr::Identifier(Ident { + variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), + value: vec![Expr::Identifier(Ident { value: "b".into(), - quote_style: None, - span: Span::empty(), + quote_style: None })], - }) + } ); let stmt = pg_and_generic().verified_stmt("SET a = 'b'"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a")]), - values: vec![Expr::Value( - (Value::SingleQuotedString("b".into())).with_empty_span() - )], - }) + variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), + value: vec![Expr::Value(Value::SingleQuotedString("b".into()))], + } ); let stmt = pg_and_generic().verified_stmt("SET a = 0"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a")]), - values: vec![Expr::value(number("0"))], - }) + variables: 
OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), + value: vec![Expr::Value(number("0"))], + } ); let stmt = pg_and_generic().verified_stmt("SET a = DEFAULT"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a")]), - values: vec![Expr::Identifier(Ident::new("DEFAULT"))], - }) + variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), + value: vec![Expr::Identifier(Ident { + value: "DEFAULT".into(), + quote_style: None + })], + } ); let stmt = pg_and_generic().verified_stmt("SET LOCAL a = b"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: Some(ContextModifier::Local), + Statement::SetVariable { + local: true, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a")]), - values: vec![Expr::Identifier("b".into())], - }) + variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), + value: vec![Expr::Identifier("b".into())], + } ); let stmt = pg_and_generic().verified_stmt("SET a.b.c = b"); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![Ident::new("a"), Ident::new("b"), Ident::new("c")]), - values: vec![Expr::Identifier(Ident { + variables: OneOrManyWithParens::One(ObjectName(vec![ + Ident::new("a"), + Ident::new("b"), + Ident::new("c") + ])), + value: vec![Expr::Identifier(Ident { value: "b".into(), - quote_style: None, - span: Span::empty(), + quote_style: None })], - }) + } ); let stmt = pg_and_generic().one_statement_parses_to( @@ -1515,21 +1394,22 @@ fn parse_set() { ); assert_eq!( stmt, - Statement::Set(Set::SingleAssignment { - scope: None, + Statement::SetVariable { + local: false, hivevar: false, - variable: ObjectName::from(vec![ + variables: OneOrManyWithParens::One(ObjectName(vec![ Ident::new("hive"), Ident::new("tez"), Ident::new("auto"), Ident::new("reducer"), Ident::new("parallelism") - ]), - values: vec![Expr::Value((Value::Boolean(false)).with_empty_span())], - }) + ])), + value: vec![Expr::Value(Value::Boolean(false))], + } ); pg_and_generic().one_statement_parses_to("SET a TO b", "SET a = b"); + pg_and_generic().one_statement_parses_to("SET SESSION a = b", "SET a = b"); assert_eq!( pg_and_generic().parse_sql_statements("SET"), @@ -1559,10 +1439,10 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::Set(Set::SetRole { - context_modifier: Some(ContextModifier::Session), + Statement::SetRole { + context_modifier: ContextModifier::Session, role_name: None, - }) + } ); assert_eq!(query, stmt.to_string()); @@ -1570,14 +1450,13 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::Set(Set::SetRole { - context_modifier: Some(ContextModifier::Local), + Statement::SetRole { + context_modifier: ContextModifier::Local, role_name: Some(Ident { value: "rolename".to_string(), quote_style: Some('\"'), - span: Span::empty(), }), - }) + } ); assert_eq!(query, stmt.to_string()); @@ -1585,14 +1464,13 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::Set(Set::SetRole { - context_modifier: None, + Statement::SetRole { + context_modifier: ContextModifier::None, role_name: Some(Ident { value: "rolename".to_string(), quote_style: Some('\''), - span: Span::empty(), }), - }) + } ); assert_eq!(query, stmt.to_string()); } @@ 
-1661,12 +1539,9 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: Some(ObjectName::from(vec!["a".into()])), + name: "a".into(), parameters: vec![], - has_parentheses: false, - using: vec![], - immediate: false, - into: vec![] + using: vec![] } ); @@ -1674,15 +1549,12 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: Some(ObjectName::from(vec!["a".into()])), + name: "a".into(), parameters: vec![ - Expr::value(number("1")), - Expr::Value((Value::SingleQuotedString("t".to_string())).with_empty_span()) + Expr::Value(number("1")), + Expr::Value(Value::SingleQuotedString("t".to_string())) ], - has_parentheses: true, - using: vec![], - immediate: false, - into: vec![] + using: vec![] } ); @@ -1691,35 +1563,22 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: Some(ObjectName::from(vec!["a".into()])), + name: "a".into(), parameters: vec![], - has_parentheses: false, using: vec![ - ExprWithAlias { - expr: Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Value( - (Value::Number("1337".parse().unwrap(), false)).with_empty_span() - )), - data_type: DataType::SmallInt(None), - format: None - }, - alias: None + Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Value(Value::Number("1337".parse().unwrap(), false))), + data_type: DataType::SmallInt(None), + format: None }, - ExprWithAlias { - expr: Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Value( - (Value::Number("7331".parse().unwrap(), false)).with_empty_span() - )), - data_type: DataType::SmallInt(None), - format: None - }, - alias: None + Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Value(Value::Number("7331".parse().unwrap(), false))), + data_type: DataType::SmallInt(None), + format: None }, - ], - immediate: false, - into: vec![] + ] } ); } @@ -1744,7 +1603,7 @@ fn parse_prepare() { }; match sub_stmt.as_ref() { Statement::Insert(Insert { - table: table_name, + table_name, columns, source: Some(source), .. 
@@ -1812,9 +1671,7 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), + target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), value: Expr::CompoundIdentifier(vec!["EXCLUDED".into(), "dname".into()]) },], selection: None @@ -1845,18 +1702,14 @@ fn parse_pg_on_conflict() { OnConflictAction::DoUpdate(DoUpdate { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ - "dname".into() - ])), + target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), value: Expr::CompoundIdentifier(vec![ "EXCLUDED".into(), "dname".into() ]) }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from(vec![ - "area".into() - ])), + target: AssignmentTarget::ColumnName(ObjectName(vec!["area".into()])), value: Expr::CompoundIdentifier(vec!["EXCLUDED".into(), "area".into()]) }, ], @@ -1906,23 +1759,16 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), - value: Expr::Value( - (Value::Placeholder("$1".to_string())).with_empty_span() - ) + target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), + value: Expr::Value(Value::Placeholder("$1".to_string())) },], selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "dsize".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None })), op: BinaryOperator::Gt, - right: Box::new(Expr::Value( - (Value::Placeholder("$2".to_string())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) }) }), action @@ -1946,30 +1792,20 @@ fn parse_pg_on_conflict() { })), .. }) => { - assert_eq!( - ObjectName::from(vec![Ident::from("distributors_did_pkey")]), - cname - ); + assert_eq!(vec![Ident::from("distributors_did_pkey")], cname.0); assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName::from( - vec!["dname".into()] - )), - value: Expr::Value( - (Value::Placeholder("$1".to_string())).with_empty_span() - ) + target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), + value: Expr::Value(Value::Placeholder("$1".to_string())) },], selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "dsize".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None })), op: BinaryOperator::Gt, - right: Box::new(Expr::Value( - (Value::Placeholder("$2".to_string())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) }) }), action @@ -2084,8 +1920,12 @@ fn parse_pg_custom_binary_ops() { let operators = [ // PostGIS "&&&", // n-D bounding boxes intersect + "&<", // (is strictly to the left of) + "&>", // (is strictly to the right of) "|=|", // distance between A and B trajectories at their closest point of approach "<<#>>", // n-D distance between A and B bounding boxes + "|>>", // A's bounding box is strictly above B's. 
+ "~=", // bounding box is the same // PGroonga "&@", // Full text search by a keyword "&@~", // Full text search by easy to use query language @@ -2185,13 +2025,9 @@ fn parse_pg_regex_match_ops() { let select = pg().verified_only_select(&format!("SELECT 'abc' {} '^a'", &str_op)); assert_eq!( SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::SingleQuotedString("abc".into())).with_empty_span() - )), + left: Box::new(Expr::Value(Value::SingleQuotedString("abc".into()))), op: op.clone(), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("^a".into())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("^a".into()))), }), select.projection[0] ); @@ -2211,13 +2047,9 @@ fn parse_pg_like_match_ops() { let select = pg().verified_only_select(&format!("SELECT 'abc' {} 'a_c%'", &str_op)); assert_eq!( SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::SingleQuotedString("abc".into())).with_empty_span() - )), + left: Box::new(Expr::Value(Value::SingleQuotedString("abc".into()))), op: op.clone(), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("a_c%".into())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("a_c%".into()))), }), select.projection[0] ); @@ -2227,17 +2059,17 @@ fn parse_pg_like_match_ops() { #[test] fn parse_array_index_expr() { let num: Vec = (0..=10) - .map(|s| Expr::Value(number(&s.to_string()).with_empty_span())) + .map(|s| Expr::Value(number(&s.to_string()))) .collect(); let sql = "SELECT foo[0] FROM foos"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Identifier(Ident::new("foo"))), - access_chain: vec![AccessExpr::Subscript(Subscript::Index { + &Expr::Subscript { + expr: Box::new(Expr::Identifier(Ident::new("foo"))), + subscript: Box::new(Subscript::Index { index: num[0].clone() - })], + }), }, expr_from_projection(only(&select.projection)), ); @@ -2245,16 +2077,16 @@ fn parse_array_index_expr() { let sql = "SELECT foo[0][0] FROM foos"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Identifier(Ident::new("foo"))), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Index { + &Expr::Subscript { + expr: Box::new(Expr::Subscript { + expr: Box::new(Expr::Identifier(Ident::new("foo"))), + subscript: Box::new(Subscript::Index { index: num[0].clone() }), - AccessExpr::Subscript(Subscript::Index { - index: num[0].clone() - }) - ], + }), + subscript: Box::new(Subscript::Index { + index: num[0].clone() + }), }, expr_from_projection(only(&select.projection)), ); @@ -2262,27 +2094,27 @@ fn parse_array_index_expr() { let sql = r#"SELECT bar[0]["baz"]["fooz"] FROM foos"#; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Identifier(Ident::new("bar"))), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Index { - index: num[0].clone() + &Expr::Subscript { + expr: Box::new(Expr::Subscript { + expr: Box::new(Expr::Subscript { + expr: Box::new(Expr::Identifier(Ident::new("bar"))), + subscript: Box::new(Subscript::Index { + index: num[0].clone() + }) }), - AccessExpr::Subscript(Subscript::Index { + subscript: Box::new(Subscript::Index { index: Expr::Identifier(Ident { value: "baz".to_string(), - quote_style: Some('"'), - span: Span::empty(), + quote_style: Some('"') }) - }), - AccessExpr::Subscript(Subscript::Index { - index: 
Expr::Identifier(Ident { - value: "fooz".to_string(), - quote_style: Some('"'), - span: Span::empty(), - }) - }), - ], + }) + }), + subscript: Box::new(Subscript::Index { + index: Expr::Identifier(Ident { + value: "fooz".to_string(), + quote_style: Some('"') + }) + }) }, expr_from_projection(only(&select.projection)), ); @@ -2290,33 +2122,33 @@ fn parse_array_index_expr() { let sql = "SELECT (CAST(ARRAY[ARRAY[2, 3]] AS INT[][]))[1][2]"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Nested(Box::new(Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Array(Array { - elem: vec![Expr::Array(Array { - elem: vec![num[2].clone(), num[3].clone(),], + &Expr::Subscript { + expr: Box::new(Expr::Subscript { + expr: Box::new(Expr::Nested(Box::new(Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Array(Array { + elem: vec![Expr::Array(Array { + elem: vec![num[2].clone(), num[3].clone(),], + named: true, + })], named: true, - })], - named: true, - })), - data_type: DataType::Array(ArrayElemTypeDef::SquareBracket( - Box::new(DataType::Array(ArrayElemTypeDef::SquareBracket( - Box::new(DataType::Int(None)), + })), + data_type: DataType::Array(ArrayElemTypeDef::SquareBracket( + Box::new(DataType::Array(ArrayElemTypeDef::SquareBracket( + Box::new(DataType::Int(None)), + None + ))), None - ))), - None - )), - format: None, - }))), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Index { + )), + format: None, + }))), + subscript: Box::new(Subscript::Index { index: num[1].clone() }), - AccessExpr::Subscript(Subscript::Index { - index: num[2].clone() - }), - ], + }), + subscript: Box::new(Subscript::Index { + index: num[2].clone() + }), }, expr_from_projection(only(&select.projection)), ); @@ -2338,7 +2170,7 @@ fn parse_array_subscript() { ( "(ARRAY[1, 2, 3, 4, 5, 6])[2]", Subscript::Index { - index: Expr::value(number("2")), + index: Expr::Value(number("2")), }, ), ( @@ -2350,17 +2182,17 @@ fn parse_array_subscript() { ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:5]", Subscript::Slice { - lower_bound: Some(Expr::value(number("2"))), - upper_bound: Some(Expr::value(number("5"))), + lower_bound: Some(Expr::Value(number("2"))), + upper_bound: Some(Expr::Value(number("5"))), stride: None, }, ), ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:5:3]", Subscript::Slice { - lower_bound: Some(Expr::value(number("2"))), - upper_bound: Some(Expr::value(number("5"))), - stride: Some(Expr::value(number("3"))), + lower_bound: Some(Expr::Value(number("2"))), + upper_bound: Some(Expr::Value(number("5"))), + stride: Some(Expr::Value(number("3"))), }, ), ( @@ -2369,12 +2201,12 @@ fn parse_array_subscript() { lower_bound: Some(Expr::BinaryOp { left: Box::new(call("array_length", [Expr::Identifier(Ident::new("arr"))])), op: BinaryOperator::Minus, - right: Box::new(Expr::value(number("3"))), + right: Box::new(Expr::Value(number("3"))), }), upper_bound: Some(Expr::BinaryOp { left: Box::new(call("array_length", [Expr::Identifier(Ident::new("arr"))])), op: BinaryOperator::Minus, - right: Box::new(Expr::value(number("1"))), + right: Box::new(Expr::Value(number("1"))), }), stride: None, }, @@ -2383,14 +2215,14 @@ fn parse_array_subscript() { "(ARRAY[1, 2, 3, 4, 5, 6])[:5]", Subscript::Slice { lower_bound: None, - upper_bound: Some(Expr::value(number("5"))), + upper_bound: Some(Expr::Value(number("5"))), stride: None, }, ), ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:]", Subscript::Slice { - lower_bound: Some(Expr::value(number("2"))), + lower_bound: Some(Expr::Value(number("2"))), 
upper_bound: None, stride: None, }, @@ -2405,13 +2237,9 @@ fn parse_array_subscript() { ), ]; for (sql, expect) in tests { - let Expr::CompoundFieldAccess { access_chain, .. } = pg_and_generic().verified_expr(sql) - else { + let Expr::Subscript { subscript, .. } = pg_and_generic().verified_expr(sql) else { panic!("expected subscript expr"); }; - let Some(AccessExpr::Subscript(subscript)) = access_chain.last() else { - panic!("expected subscript"); - }; assert_eq!(expect, *subscript); } @@ -2422,25 +2250,25 @@ fn parse_array_subscript() { fn parse_array_multi_subscript() { let expr = pg_and_generic().verified_expr("make_array(1, 2, 3)[1:2][2]"); assert_eq!( - Expr::CompoundFieldAccess { - root: Box::new(call( - "make_array", - vec![ - Expr::value(number("1")), - Expr::value(number("2")), - Expr::value(number("3")) - ] - )), - access_chain: vec![ - AccessExpr::Subscript(Subscript::Slice { - lower_bound: Some(Expr::value(number("1"))), - upper_bound: Some(Expr::value(number("2"))), + Expr::Subscript { + expr: Box::new(Expr::Subscript { + expr: Box::new(call( + "make_array", + vec![ + Expr::Value(number("1")), + Expr::Value(number("2")), + Expr::Value(number("3")) + ] + )), + subscript: Box::new(Subscript::Slice { + lower_bound: Some(Expr::Value(number("1"))), + upper_bound: Some(Expr::Value(number("2"))), stride: None, }), - AccessExpr::Subscript(Subscript::Index { - index: Expr::value(number("2")), - }), - ], + }), + subscript: Box::new(Subscript::Index { + index: Expr::Value(number("2")), + }), }, expr, ); @@ -2508,271 +2336,6 @@ fn parse_create_anonymous_index() { } } -#[test] -/// Test to verify the correctness of parsing the `CREATE INDEX` statement with optional operator classes. -/// -/// # Implementative details -/// -/// At this time, since the parser library is not intended to take care of the semantics of the SQL statements, -/// there is no way to verify the correctness of the operator classes, nor whether they are valid for the given -/// index type. This test is only intended to verify that the parser can correctly parse the statement. For this -/// reason, the test includes a `totally_not_valid` operator class. 
-fn parse_create_indices_with_operator_classes() { - let indices = [ - IndexType::GIN, - IndexType::GiST, - IndexType::SPGiST, - IndexType::Custom("CustomIndexType".into()), - ]; - let operator_classes: [Option; 4] = [ - None, - Some("gin_trgm_ops".into()), - Some("gist_trgm_ops".into()), - Some("totally_not_valid".into()), - ]; - - for expected_index_type in indices { - for expected_operator_class in &operator_classes { - let single_column_sql_statement = format!( - "CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})", - expected_operator_class.as_ref().map(|oc| format!(" {oc}")) - .unwrap_or_default() - ); - let multi_column_sql_statement = format!( - "CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})", - expected_operator_class.as_ref().map(|oc| format!(" {oc}")) - .unwrap_or_default() - ); - - let expected_function_column = IndexColumn { - column: OrderByExpr { - expr: Expr::Function(Function { - name: ObjectName(vec![ObjectNamePart::Identifier(Ident { - value: "concat_users_name".to_owned(), - quote_style: None, - span: Span::empty(), - })]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - duplicate_treatment: None, - args: vec![ - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( - Ident { - value: "first_name".to_owned(), - quote_style: None, - span: Span::empty(), - }, - ))), - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( - Ident { - value: "last_name".to_owned(), - quote_style: None, - span: Span::empty(), - }, - ))), - ], - clauses: vec![], - }), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - }), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }, - operator_class: expected_operator_class.clone(), - }; - - match pg().verified_stmt(&single_column_sql_statement) { - Statement::CreateIndex(CreateIndex { - name: Some(ObjectName(name)), - table_name: ObjectName(table_name), - using: Some(using), - columns, - unique: false, - concurrently: false, - if_not_exists: false, - include, - nulls_distinct: None, - with, - predicate: None, - }) => { - assert_eq_vec(&["the_index_name"], &name); - assert_eq_vec(&["users"], &table_name); - assert_eq!(expected_index_type, using); - assert_eq!(expected_function_column, columns[0],); - assert!(include.is_empty()); - assert!(with.is_empty()); - } - _ => unreachable!(), - } - - match pg().verified_stmt(&multi_column_sql_statement) { - Statement::CreateIndex(CreateIndex { - name: Some(ObjectName(name)), - table_name: ObjectName(table_name), - using: Some(using), - columns, - unique: false, - concurrently: false, - if_not_exists: false, - include, - nulls_distinct: None, - with, - predicate: None, - }) => { - assert_eq_vec(&["the_index_name"], &name); - assert_eq_vec(&["users"], &table_name); - assert_eq!(expected_index_type, using); - assert_eq!( - IndexColumn { - column: OrderByExpr { - expr: Expr::Identifier(Ident { - value: "column_name".to_owned(), - quote_style: None, - span: Span::empty() - }), - options: OrderByOptions { - asc: None, - nulls_first: None, - }, - with_fill: None, - }, - operator_class: None - }, - columns[0], - ); - assert_eq!(expected_function_column, columns[1],); - assert!(include.is_empty()); - assert!(with.is_empty()); - } - _ => unreachable!(), - } - } - } -} - -#[test] -fn parse_create_bloom() { - let sql = - "CREATE INDEX 
bloomidx ON tbloom USING BLOOM (i1,i2,i3) WITH (length = 80, col1 = 2, col2 = 2, col3 = 4)"; - match pg().verified_stmt(sql) { - Statement::CreateIndex(CreateIndex { - name: Some(ObjectName(name)), - table_name: ObjectName(table_name), - using: Some(using), - columns, - unique: false, - concurrently: false, - if_not_exists: false, - include, - nulls_distinct: None, - with, - predicate: None, - }) => { - assert_eq_vec(&["bloomidx"], &name); - assert_eq_vec(&["tbloom"], &table_name); - assert_eq!(IndexType::Bloom, using); - assert_eq_vec(&["i1", "i2", "i3"], &columns); - assert!(include.is_empty()); - assert_eq!( - vec![ - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("length"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("80").into())), - }, - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("col1"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("2").into())), - }, - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("col2"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("2").into())), - }, - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("col3"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("4").into())), - }, - ], - with - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_brin() { - let sql = "CREATE INDEX brin_sensor_data_recorded_at ON sensor_data USING BRIN (recorded_at)"; - match pg().verified_stmt(sql) { - Statement::CreateIndex(CreateIndex { - name: Some(ObjectName(name)), - table_name: ObjectName(table_name), - using: Some(using), - columns, - unique: false, - concurrently: false, - if_not_exists: false, - include, - nulls_distinct: None, - with, - predicate: None, - }) => { - assert_eq_vec(&["brin_sensor_data_recorded_at"], &name); - assert_eq_vec(&["sensor_data"], &table_name); - assert_eq!(IndexType::BRIN, using); - assert_eq_vec(&["recorded_at"], &columns); - assert!(include.is_empty()); - assert!(with.is_empty()); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_table_with_inherits() { - let single_inheritance_sql = - "CREATE TABLE child_table (child_column INT) INHERITS (public.parent_table)"; - match pg().verified_stmt(single_inheritance_sql) { - Statement::CreateTable(CreateTable { - inherits: Some(inherits), - .. - }) => { - assert_eq_vec(&["public", "parent_table"], &inherits[0].0); - } - _ => unreachable!(), - } - - let double_inheritance_sql = "CREATE TABLE child_table (child_column INT) INHERITS (public.parent_table, pg_catalog.pg_settings)"; - match pg().verified_stmt(double_inheritance_sql) { - Statement::CreateTable(CreateTable { - inherits: Some(inherits), - .. 
- }) => { - assert_eq_vec(&["public", "parent_table"], &inherits[0].0); - assert_eq_vec(&["pg_catalog", "pg_settings"], &inherits[1].0); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_table_with_empty_inherits_fails() { - assert!(matches!( - pg().parse_sql_statements("CREATE TABLE child_table (child_column INT) INHERITS ()"), - Err(ParserError::ParserError(_)) - )); -} - #[test] fn parse_create_index_concurrently() { let sql = "CREATE INDEX CONCURRENTLY IF NOT EXISTS my_index ON my_table(col1,col2)"; @@ -2933,8 +2496,7 @@ fn parse_array_subquery_expr() { let select = pg().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("ARRAY")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("ARRAY")]), parameters: FunctionArguments::None, args: FunctionArguments::Subquery(Box::new(Query { with: None, @@ -2942,14 +2504,10 @@ fn parse_array_subquery_expr() { op: SetOperator::Union, set_quantifier: SetQuantifier::None, left: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, - projection: vec![SelectItem::UnnamedExpr(Expr::Value( - (number("1")).with_empty_span() - ))], - exclude: None, + projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))], into: None, from: vec![], lateral_views: vec![], @@ -2965,17 +2523,12 @@ fn parse_array_subquery_expr() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), right: Box::new(SetExpr::Select(Box::new(Select { - select_token: AttachedToken::empty(), distinct: None, top: None, top_before_distinct: false, - projection: vec![SelectItem::UnnamedExpr(Expr::Value( - (number("2")).with_empty_span() - ))], - exclude: None, + projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("2")))], into: None, from: vec![], lateral_views: vec![], @@ -2991,17 +2544,17 @@ fn parse_array_subquery_expr() { window_before_qualify: false, value_table_mode: None, connect_by: None, - flavor: SelectFlavor::Standard, }))), }), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), filter: None, null_treatment: None, @@ -3017,16 +2570,16 @@ fn test_transaction_statement() { let statement = pg().verified_stmt("SET TRANSACTION SNAPSHOT '000003A1-1'"); assert_eq!( statement, - Statement::Set(Set::SetTransaction { + Statement::SetTransaction { modes: vec![], snapshot: Some(Value::SingleQuotedString(String::from("000003A1-1"))), session: false - }) + } ); let statement = pg().verified_stmt("SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE"); assert_eq!( statement, - Statement::Set(Set::SetTransaction { + Statement::SetTransaction { modes: vec![ TransactionMode::AccessMode(TransactionAccessMode::ReadOnly), TransactionMode::AccessMode(TransactionAccessMode::ReadWrite), @@ -3034,7 +2587,7 @@ fn test_transaction_statement() { ], snapshot: None, session: true - }) + } ); } @@ -3046,9 +2599,7 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("params"))), op: BinaryOperator::LongArrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("name".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("name".to_string()))), }), select.projection[0] ); @@ -3059,9 +2610,7 @@ fn 
test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("params"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("name".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("name".to_string()))), }), select.projection[0] ); @@ -3073,14 +2622,12 @@ fn test_json() { left: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("items".to_string())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::SingleQuotedString("items".to_string()))) }), op: BinaryOperator::LongArrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("product".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "product".to_string() + ))), }), select.projection[0] ); @@ -3092,7 +2639,7 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("obj"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::value(number("42"))), + right: Box::new(Expr::Value(number("42"))), }), select.projection[0] ); @@ -3117,9 +2664,9 @@ fn test_json() { left: Box::new(Expr::Identifier(Ident::new("obj"))), op: BinaryOperator::Arrow, right: Box::new(Expr::BinaryOp { - left: Box::new(Expr::value(number("3"))), + left: Box::new(Expr::Value(number("3"))), op: BinaryOperator::Multiply, - right: Box::new(Expr::value(number("2"))), + right: Box::new(Expr::Value(number("2"))), }), }), select.projection[0] @@ -3131,9 +2678,9 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::HashArrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("{a,b,c}".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "{a,b,c}".to_string() + ))), }), select.projection[0] ); @@ -3144,9 +2691,9 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::HashLongArrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("{a,b,c}".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "{a,b,c}".to_string() + ))), }), select.projection[0] ); @@ -3157,9 +2704,9 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::AtArrow, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("{\"a\": 1}".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString( + "{\"a\": 1}".to_string() + ))), }, select.selection.unwrap(), ); @@ -3168,9 +2715,9 @@ fn test_json() { let select = pg().verified_only_select(sql); assert_eq!( Expr::BinaryOp { - left: Box::new(Expr::Value( - (Value::SingleQuotedString("{\"a\": 1}".to_string())).with_empty_span() - )), + left: Box::new(Expr::Value(Value::SingleQuotedString( + "{\"a\": 1}".to_string() + ))), op: BinaryOperator::ArrowAt, right: Box::new(Expr::Identifier(Ident::new("info"))), }, @@ -3185,8 +2732,8 @@ fn test_json() { op: BinaryOperator::HashMinus, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value((Value::SingleQuotedString("a".to_string())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), + Expr::Value(Value::SingleQuotedString("a".to_string())), + 
Expr::Value(Value::SingleQuotedString("b".to_string())), ], named: true, })), @@ -3200,9 +2747,7 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::from("info"))), op: BinaryOperator::AtQuestion, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("$.a".to_string())).with_empty_span() - ),), + right: Box::new(Expr::Value(Value::SingleQuotedString("$.a".to_string())),), }, select.selection.unwrap(), ); @@ -3213,9 +2758,7 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::from("info"))), op: BinaryOperator::AtAt, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("$.a".to_string())).with_empty_span() - ),), + right: Box::new(Expr::Value(Value::SingleQuotedString("$.a".to_string())),), }, select.selection.unwrap(), ); @@ -3226,9 +2769,7 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::Question, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("b".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("b".to_string()))), }, select.selection.unwrap(), ); @@ -3241,8 +2782,8 @@ fn test_json() { op: BinaryOperator::QuestionAnd, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("c".to_string())).with_empty_span()) + Expr::Value(Value::SingleQuotedString("b".to_string())), + Expr::Value(Value::SingleQuotedString("c".to_string())) ], named: true })) @@ -3258,8 +2799,8 @@ fn test_json() { op: BinaryOperator::QuestionPipe, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), - Expr::Value((Value::SingleQuotedString("c".to_string())).with_empty_span()) + Expr::Value(Value::SingleQuotedString("b".to_string())), + Expr::Value(Value::SingleQuotedString("c".to_string())) ], named: true })) @@ -3268,19 +2809,6 @@ fn test_json() { ); } -#[test] -fn test_fn_arg_with_value_operator() { - match pg().verified_expr("JSON_OBJECT('name' VALUE 'value')") { - Expr::Function(Function { args: FunctionArguments::List(FunctionArgumentList { args, .. }), .. }) => { - assert!(matches!( - &args[..], - &[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }] - ), "Invalid function argument: {args:?}"); - } - other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"), - } -} - #[test] fn parse_json_table_is_not_reserved() { // JSON_TABLE is not a reserved keyword in PostgreSQL, even though it is in SQL:2023 @@ -3291,10 +2819,7 @@ fn parse_json_table_is_not_reserved() { TableFactor::Table { name: ObjectName(name), .. 
- } => assert_eq!( - ObjectNamePart::Identifier(Ident::new("JSON_TABLE")), - name[0] - ), + } => assert_eq!("JSON_TABLE", name[0].value), other => panic!("Expected: JSON_TABLE to be parsed as a table name, but got {other:?}"), } } @@ -3303,62 +2828,50 @@ fn parse_json_table_is_not_reserved() { fn test_composite_value() { let sql = "SELECT (on_hand.item).name FROM on_hand WHERE (on_hand.item).price > 9"; let select = pg().verified_only_select(sql); - - let Expr::CompoundFieldAccess { root, access_chain } = - expr_from_projection(&select.projection[0]) - else { - unreachable!("expected projection: got {:?}", &select.projection[0]); - }; assert_eq!( - root.as_ref(), - &Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("on_hand"), - Ident::new("item") - ]))) - ); - assert_eq!( - access_chain.as_slice(), - &[AccessExpr::Dot(Expr::Identifier(Ident::new("name")))] + SelectItem::UnnamedExpr(Expr::CompositeAccess { + key: Ident::new("name"), + expr: Box::new(Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("on_hand"), + Ident::new("item") + ])))) + }), + select.projection[0] ); assert_eq!( - select.selection.as_ref().unwrap(), - &Expr::BinaryOp { - left: Box::new(Expr::CompoundFieldAccess { - root: Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ + select.selection, + Some(Expr::BinaryOp { + left: Box::new(Expr::CompositeAccess { + key: Ident::new("price"), + expr: Box::new(Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ Ident::new("on_hand"), Ident::new("item") - ]))) - .into(), - access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("price")))] + ])))) }), op: BinaryOperator::Gt, - right: Box::new(Expr::value(number("9"))) - } + right: Box::new(Expr::Value(number("9"))) + }) ); let sql = "SELECT (information_schema._pg_expandarray(ARRAY['i', 'i'])).n"; let select = pg().verified_only_select(sql); assert_eq!( - &Expr::CompoundFieldAccess { - root: Box::new(Expr::Nested(Box::new(Expr::Function(Function { - name: ObjectName::from(vec![ + SelectItem::UnnamedExpr(Expr::CompositeAccess { + key: Ident::new("n"), + expr: Box::new(Expr::Nested(Box::new(Expr::Function(Function { + name: ObjectName(vec![ Ident::new("information_schema"), Ident::new("_pg_expandarray") ]), - uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Array( Array { elem: vec![ - Expr::Value( - (Value::SingleQuotedString("i".to_string())).with_empty_span() - ), - Expr::Value( - (Value::SingleQuotedString("i".to_string())).with_empty_span() - ), + Expr::Value(Value::SingleQuotedString("i".to_string())), + Expr::Value(Value::SingleQuotedString("i".to_string())), ], named: true } @@ -3369,13 +2882,74 @@ fn test_composite_value() { filter: None, over: None, within_group: vec![], - })))), - access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("n")))], - }, - expr_from_projection(&select.projection[0]) + })))) + }), + select.projection[0] ); } +#[test] +fn parse_comments() { + match pg().verified_stmt("COMMENT ON COLUMN tab.name IS 'comment'") { + Statement::Comment { + object_type, + object_name, + comment: Some(comment), + if_exists, + } => { + assert_eq!("comment", comment); + assert_eq!("tab.name", object_name.to_string()); + assert_eq!(CommentObject::Column, object_type); + assert!(!if_exists); + } + _ => unreachable!(), + } + + match pg().verified_stmt("COMMENT ON EXTENSION plpgsql IS 'comment'") { + Statement::Comment { + 
object_type, + object_name, + comment: Some(comment), + if_exists, + } => { + assert_eq!("comment", comment); + assert_eq!("plpgsql", object_name.to_string()); + assert_eq!(CommentObject::Extension, object_type); + assert!(!if_exists); + } + _ => unreachable!(), + } + + match pg().verified_stmt("COMMENT ON TABLE public.tab IS 'comment'") { + Statement::Comment { + object_type, + object_name, + comment: Some(comment), + if_exists, + } => { + assert_eq!("comment", comment); + assert_eq!("public.tab", object_name.to_string()); + assert_eq!(CommentObject::Table, object_type); + assert!(!if_exists); + } + _ => unreachable!(), + } + + match pg().verified_stmt("COMMENT IF EXISTS ON TABLE public.tab IS NULL") { + Statement::Comment { + object_type, + object_name, + comment: None, + if_exists, + } => { + assert_eq!("public.tab", object_name.to_string()); + assert_eq!(CommentObject::Table, object_type); + assert!(if_exists); + } + _ => unreachable!(), + } +} + #[test] fn parse_quoted_identifier() { pg_and_generic().verified_stmt(r#"SELECT "quoted "" ident""#); @@ -3418,27 +2992,27 @@ fn parse_escaped_literal_string() { let select = pg_and_generic().verified_only_select(sql); assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("s1 \n s1".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("s1 \n s1".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("s2 \\n s2".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("s2 \\n s2".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("s3 \\\n s3".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("s3 \\\n s3".to_string())), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("s4 \\\\n s4".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("s4 \\\\n s4".to_string())), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("'".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("'".to_string())), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("foo \\".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("foo \\".to_string())), expr_from_projection(&select.projection[5]) ); @@ -3456,31 +3030,31 @@ fn parse_escaped_literal_string() { let select = pg_and_generic().verified_only_select_with_canonical(sql, canonical); assert_eq!(7, select.projection.len()); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("\u{0001}".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("\u{0001}".to_string())), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("\u{10ffff}".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("\u{10ffff}".to_string())), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("\u{000c}".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("\u{000c}".to_string())), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("%".to_string())), 
expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("\u{0002}".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("\u{0002}".to_string())), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("%".to_string())), expr_from_projection(&select.projection[5]) ); assert_eq!( - &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), + &Expr::Value(Value::EscapedStringLiteral("%".to_string())), expr_from_projection(&select.projection[6]) ); @@ -3528,8 +3102,7 @@ fn parse_current_functions() { let select = pg_and_generic().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("CURRENT_CATALOG")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("CURRENT_CATALOG")]), parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3541,8 +3114,7 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("CURRENT_USER")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("CURRENT_USER")]), parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3554,8 +3126,7 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("SESSION_USER")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("SESSION_USER")]), parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3567,8 +3138,7 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("USER")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new("USER")]), parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3615,16 +3185,13 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec![ "database".into(), "pg_catalog".into(), "~".into() ]), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("^(table)$".into())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) }) ); @@ -3637,12 +3204,9 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec!["pg_catalog".into(), "~".into()]), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("^(table)$".into())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) }) ); @@ -3655,12 +3219,9 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec!["~".into()]), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("^(table)$".into())).with_empty_span() - )) + right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) }) ); } @@ -3746,9 +3307,9 @@ fn parse_create_role() { assert_eq!(*bypassrls, Some(true)); assert_eq!( *password, - Some(Password::Password(Expr::Value( - (Value::SingleQuotedString("abcdef".into())).with_empty_span() - ))) + 
Some(Password::Password(Expr::Value(Value::SingleQuotedString( + "abcdef".into() + )))) ); assert_eq!(*superuser, Some(true)); assert_eq!(*create_db, Some(false)); @@ -3757,9 +3318,7 @@ fn parse_create_role() { assert_eq!(*connection_limit, None); assert_eq!( *valid_until, - Some(Expr::Value( - (Value::SingleQuotedString("2025-01-01".into())).with_empty_span() - )) + Some(Expr::Value(Value::SingleQuotedString("2025-01-01".into()))) ); assert_eq_vec(&["role1", "role2"], in_role); assert!(in_group.is_empty()); @@ -3810,14 +3369,12 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "old_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::RenameRole { role_name: Ident { value: "new_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None } }, } @@ -3829,8 +3386,7 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::WithOptions { options: vec![ @@ -3841,15 +3397,13 @@ fn parse_alter_role() { RoleOption::Login(true), RoleOption::Replication(true), RoleOption::BypassRLS(true), - RoleOption::ConnectionLimit(Expr::value(number("100"))), + RoleOption::ConnectionLimit(Expr::Value(number("100"))), RoleOption::Password({ - Password::Password(Expr::Value( - (Value::SingleQuotedString("abcdef".into())).with_empty_span(), - )) + Password::Password(Expr::Value(Value::SingleQuotedString("abcdef".into()))) }), - RoleOption::ValidUntil(Expr::Value( - (Value::SingleQuotedString("2025-01-01".into(),)).with_empty_span() - )) + RoleOption::ValidUntil(Expr::Value(Value::SingleQuotedString( + "2025-01-01".into(), + ))) ] }, } @@ -3861,8 +3415,7 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::WithOptions { options: vec![ @@ -3885,14 +3438,12 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Set { - config_name: ObjectName::from(vec![Ident { + config_name: ObjectName(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), config_value: SetConfigValue::FromCurrent, in_database: None @@ -3906,22 +3457,17 @@ fn parse_alter_role() { [Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Set { - config_name: ObjectName::from(vec![Ident { + config_name: ObjectName(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), - config_value: SetConfigValue::Value(Expr::Value( - (number("100000")).with_empty_span() - )), - in_database: Some(ObjectName::from(vec![Ident { + config_value: SetConfigValue::Value(Expr::Value(number("100000"))), + in_database: Some(ObjectName(vec![Ident { value: "database_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])) }, }] @@ -3933,22 +3479,17 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Set { - config_name: ObjectName::from(vec![Ident { + config_name: ObjectName(vec![Ident { value: 
"maintenance_work_mem".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), - config_value: SetConfigValue::Value(Expr::Value( - (number("100000")).with_empty_span() - )), - in_database: Some(ObjectName::from(vec![Ident { + config_value: SetConfigValue::Value(Expr::Value(number("100000"))), + in_database: Some(ObjectName(vec![Ident { value: "database_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])) }, } @@ -3960,20 +3501,17 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Set { - config_name: ObjectName::from(vec![Ident { + config_name: ObjectName(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), config_value: SetConfigValue::Default, - in_database: Some(ObjectName::from(vec![Ident { + in_database: Some(ObjectName(vec![Ident { value: "database_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])) }, } @@ -3985,8 +3523,7 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Reset { config_name: ResetConfig::ALL, @@ -4001,19 +3538,16 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }, operation: AlterRoleOperation::Reset { - config_name: ResetConfig::ConfigName(ObjectName::from(vec![Ident { + config_name: ResetConfig::ConfigName(ObjectName(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])), - in_database: Some(ObjectName::from(vec![Ident { + in_database: Some(ObjectName(vec![Ident { value: "database_name".into(), - quote_style: None, - span: Span::empty(), + quote_style: None }])) }, } @@ -4034,12 +3568,10 @@ fn parse_delimited_identifiers() { args, with_hints, version, - .. 
+ with_ordinality: _, + partitions: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -4058,8 +3590,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -4106,226 +3637,15 @@ fn parse_update_in_with_subquery() { pg_and_generic().verified_stmt(r#"WITH "result" AS (UPDATE "Hero" SET "name" = 'Captain America', "number_of_movies" = "number_of_movies" + 1 WHERE "secret_identity" = 'Sam Wilson' RETURNING "id", "name", "secret_identity", "number_of_movies") SELECT * FROM "result""#); } -#[test] -fn parser_create_function_with_args() { - let sql1 = r#"CREATE OR REPLACE FUNCTION check_strings_different(str1 VARCHAR, str2 VARCHAR) RETURNS BOOLEAN LANGUAGE plpgsql AS $$ -BEGIN - IF str1 <> str2 THEN - RETURN TRUE; - ELSE - RETURN FALSE; - END IF; -END; -$$"#; - - assert_eq!( - pg_and_generic().verified_stmt(sql1), - Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: true, - temporary: false, - name: ObjectName::from(vec![Ident::new("check_strings_different")]), - args: Some(vec![ - OperateFunctionArg::with_name( - "str1", - DataType::Varchar(None), - ), - OperateFunctionArg::with_name( - "str2", - DataType::Varchar(None), - ), - ]), - return_type: Some(DataType::Boolean), - language: Some("plpgsql".into()), - behavior: None, - called_on_null: None, - parallel: None, - function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF str1 <> str2 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span() - ))), - if_not_exists: false, - using: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }) - ); - - let sql2 = r#"CREATE OR REPLACE FUNCTION check_not_zero(int1 INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$ -BEGIN - IF int1 <> 0 THEN - RETURN TRUE; - ELSE - RETURN FALSE; - END IF; -END; -$$"#; - assert_eq!( - pg_and_generic().verified_stmt(sql2), - Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: true, - temporary: false, - name: ObjectName::from(vec![Ident::new("check_not_zero")]), - args: Some(vec![ - OperateFunctionArg::with_name( - "int1", - DataType::Int(None) - ) - ]), - return_type: Some(DataType::Boolean), - language: Some("plpgsql".into()), - behavior: None, - called_on_null: None, - parallel: None, - function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF int1 <> 0 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span() - ))), - if_not_exists: false, - using: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }) - ); - - let sql3 = r#"CREATE OR REPLACE FUNCTION check_values_different(a INT, b INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$ -BEGIN - IF a <> b THEN - RETURN TRUE; - ELSE - RETURN FALSE; - END IF; -END; -$$"#; - assert_eq!( - pg_and_generic().verified_stmt(sql3), - 
Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: true, - temporary: false, - name: ObjectName::from(vec![Ident::new("check_values_different")]), - args: Some(vec![ - OperateFunctionArg::with_name( - "a", - DataType::Int(None) - ), - OperateFunctionArg::with_name( - "b", - DataType::Int(None) - ), - ]), - return_type: Some(DataType::Boolean), - language: Some("plpgsql".into()), - behavior: None, - called_on_null: None, - parallel: None, - function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF a <> b THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span() - ))), - if_not_exists: false, - using: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }) - ); - - let sql4 = r#"CREATE OR REPLACE FUNCTION check_values_different(int1 INT, int2 INT) RETURNS BOOLEAN LANGUAGE plpgsql AS $$ -BEGIN - IF int1 <> int2 THEN - RETURN TRUE; - ELSE - RETURN FALSE; - END IF; -END; -$$"#; - assert_eq!( - pg_and_generic().verified_stmt(sql4), - Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: true, - temporary: false, - name: ObjectName::from(vec![Ident::new("check_values_different")]), - args: Some(vec![ - OperateFunctionArg::with_name( - "int1", - DataType::Int(None) - ), - OperateFunctionArg::with_name( - "int2", - DataType::Int(None) - ), - ]), - return_type: Some(DataType::Boolean), - language: Some("plpgsql".into()), - behavior: None, - called_on_null: None, - parallel: None, - function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::DollarQuotedString(DollarQuotedString {value: "\nBEGIN\n IF int1 <> int2 THEN\n RETURN TRUE;\n ELSE\n RETURN FALSE;\n END IF;\nEND;\n".to_owned(), tag: None})).with_empty_span() - ))), - if_not_exists: false, - using: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }) - ); - - let sql5 = r#"CREATE OR REPLACE FUNCTION foo(a TIMESTAMP WITH TIME ZONE, b VARCHAR) RETURNS BOOLEAN LANGUAGE plpgsql AS $$ - BEGIN - RETURN TRUE; - END; - $$"#; - assert_eq!( - pg_and_generic().verified_stmt(sql5), - Statement::CreateFunction(CreateFunction { - or_alter: false, - or_replace: true, - temporary: false, - name: ObjectName::from(vec![Ident::new("foo")]), - args: Some(vec![ - OperateFunctionArg::with_name( - "a", - DataType::Timestamp(None, TimezoneInfo::WithTimeZone) - ), - OperateFunctionArg::with_name("b", DataType::Varchar(None)), - ]), - return_type: Some(DataType::Boolean), - language: Some("plpgsql".into()), - behavior: None, - called_on_null: None, - parallel: None, - function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - value: "\n BEGIN\n RETURN TRUE;\n END;\n ".to_owned(), - tag: None - })) - .with_empty_span() - ))), - if_not_exists: false, - using: None, - determinism_specifier: None, - options: None, - remote_connection: None, - }) - ); - - let incorrect_sql = "CREATE FUNCTION add(function(struct int64), b INTEGER) RETURNS INTEGER LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE AS 'select $1 + $2;'"; - assert!(pg().parse_sql_statements(incorrect_sql).is_err(),); -} - #[test] fn parse_create_function() { let sql = "CREATE FUNCTION add(INTEGER, INTEGER) RETURNS INTEGER LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE AS 'select $1 + $2;'"; assert_eq!( pg_and_generic().verified_stmt(sql), - Statement::CreateFunction(CreateFunction { - or_alter: false, + 
Statement::CreateFunction { or_replace: false, temporary: false, - name: ObjectName::from(vec![Ident::new("add")]), + name: ObjectName(vec![Ident::new("add")]), args: Some(vec![ OperateFunctionArg::unnamed(DataType::Integer(None)), OperateFunctionArg::unnamed(DataType::Integer(None)), @@ -4336,14 +3656,14 @@ fn parse_create_function() { called_on_null: Some(FunctionCalledOnNull::Strict), parallel: Some(FunctionParallel::Safe), function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - (Value::SingleQuotedString("select $1 + $2;".into())).with_empty_span() + Value::SingleQuotedString("select $1 + $2;".into()) ))), if_not_exists: false, using: None, determinism_specifier: None, options: None, remote_connection: None, - }) + } ); } @@ -4354,8 +3674,6 @@ fn parse_create_function_detailed() { pg_and_generic().verified_stmt("CREATE OR REPLACE FUNCTION add(a INTEGER, IN b INTEGER = 1) RETURNS INTEGER LANGUAGE SQL STABLE PARALLEL UNSAFE RETURN a + b"); pg_and_generic().verified_stmt("CREATE OR REPLACE FUNCTION add(a INTEGER, IN b INTEGER = 1) RETURNS INTEGER LANGUAGE SQL STABLE CALLED ON NULL INPUT PARALLEL UNSAFE RETURN a + b"); pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION increment(i INTEGER) RETURNS INTEGER LANGUAGE plpgsql AS $$ BEGIN RETURN i + 1; END; $$"#); - pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION no_arg() RETURNS VOID LANGUAGE plpgsql AS $$ BEGIN DELETE FROM my_table; END; $$"#); - pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION return_table(i INTEGER) RETURNS TABLE(id UUID, is_active BOOLEAN) LANGUAGE plpgsql AS $$ BEGIN RETURN QUERY SELECT NULL::UUID, NULL::BOOLEAN; END; $$"#); } #[test] fn parse_incorrect_create_function_parallel() { @@ -4371,14 +3689,13 @@ fn parse_drop_function() { Statement::DropFunction { if_exists: true, func_desc: vec![FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_func".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: None }], - drop_behavior: None + option: None } ); @@ -4388,10 +3705,9 @@ fn parse_drop_function() { Statement::DropFunction { if_exists: true, func_desc: vec![FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_func".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -4399,13 +3715,11 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number("1".parse().unwrap(), false))), } ]), }], - drop_behavior: None + option: None } ); @@ -4416,10 +3730,9 @@ fn parse_drop_function() { if_exists: true, func_desc: vec![ FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_func1".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -4427,17 +3740,17 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number( + "1".parse().unwrap(), + false + ))), } ]), }, FunctionDesc { - name: 
ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_func2".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Varchar(None)), @@ -4445,78 +3758,19 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number( + "1".parse().unwrap(), + false + ))), } ]), } ], - drop_behavior: None + option: None } ); } -#[test] -fn parse_drop_domain() { - let sql = "DROP DOMAIN IF EXISTS jpeg_domain"; - assert_eq!( - pg().verified_stmt(sql), - Statement::DropDomain(DropDomain { - if_exists: true, - name: ObjectName::from(vec![Ident { - value: "jpeg_domain".to_string(), - quote_style: None, - span: Span::empty(), - }]), - drop_behavior: None - }) - ); - - let sql = "DROP DOMAIN jpeg_domain"; - assert_eq!( - pg().verified_stmt(sql), - Statement::DropDomain(DropDomain { - if_exists: false, - name: ObjectName::from(vec![Ident { - value: "jpeg_domain".to_string(), - quote_style: None, - span: Span::empty(), - }]), - drop_behavior: None - }) - ); - - let sql = "DROP DOMAIN IF EXISTS jpeg_domain CASCADE"; - assert_eq!( - pg().verified_stmt(sql), - Statement::DropDomain(DropDomain { - if_exists: true, - name: ObjectName::from(vec![Ident { - value: "jpeg_domain".to_string(), - quote_style: None, - span: Span::empty(), - }]), - drop_behavior: Some(DropBehavior::Cascade) - }) - ); - - let sql = "DROP DOMAIN IF EXISTS jpeg_domain RESTRICT"; - - assert_eq!( - pg().verified_stmt(sql), - Statement::DropDomain(DropDomain { - if_exists: true, - name: ObjectName::from(vec![Ident { - value: "jpeg_domain".to_string(), - quote_style: None, - span: Span::empty(), - }]), - drop_behavior: Some(DropBehavior::Restrict) - }) - ); -} - #[test] fn parse_drop_procedure() { let sql = "DROP PROCEDURE IF EXISTS test_proc"; @@ -4525,14 +3779,13 @@ fn parse_drop_procedure() { Statement::DropProcedure { if_exists: true, proc_desc: vec![FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_proc".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: None }], - drop_behavior: None + option: None } ); @@ -4542,10 +3795,9 @@ fn parse_drop_procedure() { Statement::DropProcedure { if_exists: true, proc_desc: vec![FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_proc".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -4553,13 +3805,11 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number("1".parse().unwrap(), false))), } ]), }], - drop_behavior: None + option: None } ); @@ -4570,10 +3820,9 @@ fn parse_drop_procedure() { if_exists: true, proc_desc: vec![ FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_proc1".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -4581,17 +3830,17 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), 
name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number( + "1".parse().unwrap(), + false + ))), } ]), }, FunctionDesc { - name: ObjectName::from(vec![Ident { + name: ObjectName(vec![Ident { value: "test_proc2".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Varchar(None)), @@ -4599,14 +3848,15 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - )), + default_expr: Some(Expr::Value(Value::Number( + "1".parse().unwrap(), + false + ))), } ]), } ], - drop_behavior: None + option: None } ); @@ -4638,76 +3888,57 @@ fn parse_dollar_quoted_string() { }; assert_eq!( - &Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "hello".into() - })) - .with_empty_span() - ), + &Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "hello".into() + })), expr_from_projection(&projection[0]) ); assert_eq!( - &Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: Some("tag_name".into()), - value: "world".into() - })) - .with_empty_span() - ), + &Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: Some("tag_name".into()), + value: "world".into() + })), expr_from_projection(&projection[1]) ); assert_eq!( - &Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "Foo$Bar".into() - })) - .with_empty_span() - ), + &Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "Foo$Bar".into() + })), expr_from_projection(&projection[2]) ); assert_eq!( projection[3], SelectItem::ExprWithAlias { - expr: Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "Foo$Bar".into(), - })) - .with_empty_span() - ), + expr: Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "Foo$Bar".into(), + })), alias: Ident { value: "col_name".into(), quote_style: None, - span: Span::empty(), }, } ); assert_eq!( expr_from_projection(&projection[4]), - &Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "".into() - })) - .with_empty_span() - ), + &Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "".into() + })), ); assert_eq!( expr_from_projection(&projection[5]), - &Expr::Value( - (Value::DollarQuotedString(DollarQuotedString { - tag: Some("tag_name".into()), - value: "".into() - })) - .with_empty_span() - ), + &Expr::Value(Value::DollarQuotedString(DollarQuotedString { + tag: Some("tag_name".into()), + value: "".into() + })), ); } @@ -4787,16 +4018,16 @@ fn parse_select_group_by_cube() { #[test] fn parse_truncate() { let truncate = pg_and_generic().verified_stmt("TRUNCATE db.table_name"); - let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); let table_names = vec![TruncateTableTarget { name: table_name.clone(), - only: false, }]; assert_eq!( Statement::Truncate { table_names, partitions: None, table: false, + only: false, identity: None, cascade: None, on_cluster: None, @@ -4810,10 +4041,9 @@ fn parse_truncate_with_options() { let truncate = pg_and_generic() 
.verified_stmt("TRUNCATE TABLE ONLY db.table_name RESTART IDENTITY CASCADE"); - let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); let table_names = vec![TruncateTableTarget { name: table_name.clone(), - only: true, }]; assert_eq!( @@ -4821,8 +4051,9 @@ fn parse_truncate_with_options() { table_names, partitions: None, table: true, + only: true, identity: Some(TruncateIdentityOption::Restart), - cascade: Some(CascadeOption::Cascade), + cascade: Some(TruncateCascadeOption::Cascade), on_cluster: None, }, truncate @@ -4835,17 +4066,15 @@ fn parse_truncate_with_table_list() { "TRUNCATE TABLE db.table_name, db.other_table_name RESTART IDENTITY CASCADE", ); - let table_name_a = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); - let table_name_b = ObjectName::from(vec![Ident::new("db"), Ident::new("other_table_name")]); + let table_name_a = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name_b = ObjectName(vec![Ident::new("db"), Ident::new("other_table_name")]); let table_names = vec![ TruncateTableTarget { name: table_name_a.clone(), - only: false, }, TruncateTableTarget { name: table_name_b.clone(), - only: false, }, ]; @@ -4854,8 +4083,9 @@ fn parse_truncate_with_table_list() { table_names, partitions: None, table: true, + only: false, identity: Some(TruncateIdentityOption::Restart), - cascade: Some(CascadeOption::Cascade), + cascade: Some(TruncateCascadeOption::Cascade), on_cluster: None, }, truncate @@ -4885,6 +4115,7 @@ fn parse_create_table_with_alias() { name, columns, constraints, + with_options: _with_options, if_not_exists: false, external: false, file_format: None, @@ -4898,31 +4129,37 @@ fn parse_create_table_with_alias() { ColumnDef { name: "int8_col".into(), data_type: DataType::Int8(None), + collation: None, options: vec![] }, ColumnDef { name: "int4_col".into(), data_type: DataType::Int4(None), + collation: None, options: vec![] }, ColumnDef { name: "int2_col".into(), data_type: DataType::Int2(None), + collation: None, options: vec![] }, ColumnDef { name: "float8_col".into(), data_type: DataType::Float8, + collation: None, options: vec![] }, ColumnDef { name: "float4_col".into(), data_type: DataType::Float4, + collation: None, options: vec![] }, ColumnDef { name: "bool_col".into(), data_type: DataType::Bool, + collation: None, options: vec![] }, ] @@ -4944,11 +4181,13 @@ fn parse_create_table_with_partition_by() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![] }, ColumnDef { name: "b".into(), data_type: DataType::Text, + collation: None, options: vec![] } ], @@ -4995,7 +4234,7 @@ fn parse_join_constraint_unnest_alias() { with_ordinality: false, }, global: false, - join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { + join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::Identifier("c1".into())), op: BinaryOperator::Eq, right: Box::new(Expr::Identifier("c2".into())), @@ -5024,26 +4263,22 @@ fn test_simple_postgres_insert_with_alias() { or: None, ignore: false, into: true, - table: TableObject::TableName(ObjectName::from(vec![Ident { + table_name: ObjectName(vec![Ident { value: "test_tables".to_string(), - quote_style: None, - span: Span::empty(), - }])), + quote_style: None + }]), table_alias: Some(Ident { value: "test_table".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }), columns: vec![ 
Ident { value: "id".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }, Ident { value: "a".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None } ], overwrite: false, @@ -5052,30 +4287,31 @@ fn test_simple_postgres_insert_with_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Identifier(Ident::new("DEFAULT")), - Expr::Value((Value::Number("123".to_string(), false)).with_empty_span()) + Expr::Identifier(Ident { + value: "DEFAULT".to_string(), + quote_style: None + }), + Expr::Value(Value::Number("123".to_string(), false)) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), - assignments: vec![], partitioned: None, after_columns: vec![], - has_table_keyword: false, + table: false, on: None, returning: None, replace_into: false, priority: None, - insert_alias: None, - settings: None, - format_clause: None, + insert_alias: None }) ) } @@ -5093,26 +4329,22 @@ fn test_simple_postgres_insert_with_alias() { or: None, ignore: false, into: true, - table: TableObject::TableName(ObjectName::from(vec![Ident { + table_name: ObjectName(vec![Ident { value: "test_tables".to_string(), - quote_style: None, - span: Span::empty(), - }])), + quote_style: None + }]), table_alias: Some(Ident { value: "test_table".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }), columns: vec![ Ident { value: "id".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }, Ident { value: "a".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None } ], overwrite: false, @@ -5121,33 +4353,34 @@ fn test_simple_postgres_insert_with_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Identifier(Ident::new("DEFAULT")), - Expr::Value( - (Value::Number(bigdecimal::BigDecimal::new(123.into(), 0), false)) - .with_empty_span() - ) + Expr::Identifier(Ident { + value: "DEFAULT".to_string(), + quote_style: None + }), + Expr::Value(Value::Number( + bigdecimal::BigDecimal::new(123.into(), 0), + false + )) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), - assignments: vec![], partitioned: None, after_columns: vec![], - has_table_keyword: false, + table: false, on: None, returning: None, replace_into: false, priority: None, - insert_alias: None, - settings: None, - format_clause: None, + insert_alias: None }) ) } @@ -5164,26 +4397,22 @@ fn test_simple_insert_with_quoted_alias() { or: None, ignore: false, into: true, - table: TableObject::TableName(ObjectName::from(vec![Ident { + table_name: ObjectName(vec![Ident { value: "test_tables".to_string(), - quote_style: None, - span: Span::empty(), - }])), + quote_style: None + }]), table_alias: Some(Ident { value: "Test_Table".to_string(), - quote_style: Some('"'), - span: Span::empty(), + quote_style: Some('"') }), columns: vec![ Ident { value: "id".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None }, Ident { value: "a".to_string(), - quote_style: None, - span: Span::empty(), + quote_style: None } ], overwrite: false, @@ -5192,32 +4421,31 @@ fn test_simple_insert_with_quoted_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: 
vec![vec![ - Expr::Identifier(Ident::new("DEFAULT")), - Expr::Value( - (Value::SingleQuotedString("0123".to_string())).with_empty_span() - ) + Expr::Identifier(Ident { + value: "DEFAULT".to_string(), + quote_style: None + }), + Expr::Value(Value::SingleQuotedString("0123".to_string())) ]] })), order_by: None, - limit_clause: None, + limit: None, + limit_by: vec![], + offset: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, - pipe_operators: vec![], })), - assignments: vec![], partitioned: None, after_columns: vec![], - has_table_keyword: false, + table: false, on: None, returning: None, replace_into: false, priority: None, insert_alias: None, - settings: None, - format_clause: None, }) ) } @@ -5260,25 +4488,22 @@ fn parse_at_time_zone() { left: Box::new(Expr::AtTimeZone { timestamp: Box::new(Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: ValueWithSpan { - value: Value::SingleQuotedString("2001-09-28 01:00".to_string()), - span: Span::empty(), - }, + value: "2001-09-28 01:00".to_owned(), }), time_zone: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - Value::SingleQuotedString("America/Los_Angeles".to_owned()).with_empty_span(), - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString( + "America/Los_Angeles".to_owned(), + ))), data_type: DataType::Text, format: None, }), }), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value( - Value::SingleQuotedString("23 hours".to_owned()).with_empty_span(), - )), + value: Box::new(Expr::Value(Value::SingleQuotedString( + "23 hours".to_owned(), + ))), leading_field: None, leading_precision: None, last_field: None, @@ -5297,22 +4522,16 @@ fn parse_at_time_zone() { fn parse_create_table_with_options() { let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; match pg().verified_stmt(sql) { - Statement::CreateTable(CreateTable { table_options, .. }) => { - let with_options = match table_options { - CreateTableOptions::With(options) => options, - _ => unreachable!(), - }; + Statement::CreateTable(CreateTable { with_options, .. 
}) => { assert_eq!( vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value( - (Value::SingleQuotedString("bar".into())).with_empty_span() - ), + value: Expr::Value(Value::SingleQuotedString("bar".into())), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::value(number("123")), + value: Expr::Value(number("123")), }, ], with_options @@ -5358,138 +4577,35 @@ fn test_table_unnest_with_ordinality() { #[test] fn test_escaped_string_literal() { match pg().verified_expr(r#"E'\n'"#) { - Expr::Value(ValueWithSpan { - value: Value::EscapedStringLiteral(s), - span: _, - }) => { + Expr::Value(Value::EscapedStringLiteral(s)) => { assert_eq!("\n", s); } _ => unreachable!(), } } -#[test] -fn parse_create_domain() { - let sql1 = "CREATE DOMAIN my_domain AS INTEGER CHECK (VALUE > 0)"; - let expected = Statement::CreateDomain(CreateDomain { - name: ObjectName::from(vec![Ident::new("my_domain")]), - data_type: DataType::Integer(None), - collation: None, - default: None, - constraints: vec![TableConstraint::Check { - name: None, - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("VALUE"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value(test_utils::number("0").into())), - }), - enforced: None, - }], - }); - - assert_eq!(pg().verified_stmt(sql1), expected); - - let sql2 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" CHECK (VALUE > 0)"; - let expected = Statement::CreateDomain(CreateDomain { - name: ObjectName::from(vec![Ident::new("my_domain")]), - data_type: DataType::Integer(None), - collation: Some(Ident::with_quote('"', "en_US")), - default: None, - constraints: vec![TableConstraint::Check { - name: None, - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("VALUE"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value(test_utils::number("0").into())), - }), - enforced: None, - }], - }); - - assert_eq!(pg().verified_stmt(sql2), expected); - - let sql3 = "CREATE DOMAIN my_domain AS INTEGER DEFAULT 1 CHECK (VALUE > 0)"; - let expected = Statement::CreateDomain(CreateDomain { - name: ObjectName::from(vec![Ident::new("my_domain")]), - data_type: DataType::Integer(None), - collation: None, - default: Some(Expr::Value(test_utils::number("1").into())), - constraints: vec![TableConstraint::Check { - name: None, - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("VALUE"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value(test_utils::number("0").into())), - }), - enforced: None, - }], - }); - - assert_eq!(pg().verified_stmt(sql3), expected); - - let sql4 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" DEFAULT 1 CHECK (VALUE > 0)"; - let expected = Statement::CreateDomain(CreateDomain { - name: ObjectName::from(vec![Ident::new("my_domain")]), - data_type: DataType::Integer(None), - collation: Some(Ident::with_quote('"', "en_US")), - default: Some(Expr::Value(test_utils::number("1").into())), - constraints: vec![TableConstraint::Check { - name: None, - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("VALUE"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value(test_utils::number("0").into())), - }), - enforced: None, - }], - }); - - assert_eq!(pg().verified_stmt(sql4), expected); - - let sql5 = "CREATE DOMAIN my_domain AS INTEGER CONSTRAINT my_constraint CHECK (VALUE > 0)"; - let expected = Statement::CreateDomain(CreateDomain { - name: ObjectName::from(vec![Ident::new("my_domain")]), - data_type: DataType::Integer(None), - collation: None, - 
default: None, - constraints: vec![TableConstraint::Check { - name: Some(Ident::new("my_constraint")), - expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("VALUE"))), - op: BinaryOperator::Gt, - right: Box::new(Expr::Value(test_utils::number("0").into())), - }), - enforced: None, - }], - }); - - assert_eq!(pg().verified_stmt(sql5), expected); -} - #[test] fn parse_create_simple_before_insert_trigger() { let sql = "CREATE TRIGGER check_insert BEFORE INSERT ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_insert"; let expected = Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName::from(vec![Ident::new("check_insert")]), + name: ObjectName(vec![Ident::new("check_insert")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert], - table_name: ObjectName::from(vec![Ident::new("accounts")]), + table_name: ObjectName(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("check_account_insert")]), + name: ObjectName(vec![Ident::new("check_account_insert")]), args: None, }, - }), - statements: None, + }, characteristics: None, }; @@ -5500,13 +4616,12 @@ fn parse_create_simple_before_insert_trigger() { fn parse_create_after_update_trigger_with_condition() { let sql = "CREATE TRIGGER check_update AFTER UPDATE ON accounts FOR EACH ROW WHEN (NEW.balance > 10000) EXECUTE FUNCTION check_account_update"; let expected = Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName::from(vec![Ident::new("check_update")]), + name: ObjectName(vec![Ident::new("check_update")]), period: TriggerPeriod::After, events: vec![TriggerEvent::Update(vec![])], - table_name: ObjectName::from(vec![Ident::new("accounts")]), + table_name: ObjectName(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, @@ -5517,16 +4632,15 @@ fn parse_create_after_update_trigger_with_condition() { Ident::new("balance"), ])), op: BinaryOperator::Gt, - right: Box::new(Expr::value(number("10000"))), + right: Box::new(Expr::Value(number("10000"))), }))), - exec_body: Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("check_account_update")]), + name: ObjectName(vec![Ident::new("check_account_update")]), args: None, }, - }), - statements: None, + }, characteristics: None, }; @@ -5537,26 +4651,24 @@ fn parse_create_after_update_trigger_with_condition() { fn parse_create_instead_of_delete_trigger() { let sql = "CREATE TRIGGER check_delete INSTEAD OF DELETE ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_deletes"; let expected = Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName::from(vec![Ident::new("check_delete")]), + name: ObjectName(vec![Ident::new("check_delete")]), period: TriggerPeriod::InsteadOf, events: vec![TriggerEvent::Delete], - table_name: ObjectName::from(vec![Ident::new("accounts")]), + table_name: ObjectName(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: 
Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("check_account_deletes")]), + name: ObjectName(vec![Ident::new("check_account_deletes")]), args: None, }, - }), - statements: None, + }, characteristics: None, }; @@ -5567,30 +4679,28 @@ fn parse_create_instead_of_delete_trigger() { fn parse_create_trigger_with_multiple_events_and_deferrable() { let sql = "CREATE CONSTRAINT TRIGGER check_multiple_events BEFORE INSERT OR UPDATE OR DELETE ON accounts DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE FUNCTION check_account_changes"; let expected = Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: true, - name: ObjectName::from(vec![Ident::new("check_multiple_events")]), + name: ObjectName(vec![Ident::new("check_multiple_events")]), period: TriggerPeriod::Before, events: vec![ TriggerEvent::Insert, TriggerEvent::Update(vec![]), TriggerEvent::Delete, ], - table_name: ObjectName::from(vec![Ident::new("accounts")]), + table_name: ObjectName(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("check_account_changes")]), + name: ObjectName(vec![Ident::new("check_account_changes")]), args: None, }, - }), - statements: None, + }, characteristics: Some(ConstraintCharacteristics { deferrable: Some(true), initially: Some(DeferrableInitial::Deferred), @@ -5605,37 +4715,35 @@ fn parse_create_trigger_with_multiple_events_and_deferrable() { fn parse_create_trigger_with_referencing() { let sql = "CREATE TRIGGER check_referencing BEFORE INSERT ON accounts REFERENCING NEW TABLE AS new_accounts OLD TABLE AS old_accounts FOR EACH ROW EXECUTE FUNCTION check_account_referencing"; let expected = Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName::from(vec![Ident::new("check_referencing")]), + name: ObjectName(vec![Ident::new("check_referencing")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert], - table_name: ObjectName::from(vec![Ident::new("accounts")]), + table_name: ObjectName(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![ TriggerReferencing { refer_type: TriggerReferencingType::NewTable, is_as: true, - transition_relation_name: ObjectName::from(vec![Ident::new("new_accounts")]), + transition_relation_name: ObjectName(vec![Ident::new("new_accounts")]), }, TriggerReferencing { refer_type: TriggerReferencingType::OldTable, is_as: true, - transition_relation_name: ObjectName::from(vec![Ident::new("old_accounts")]), + transition_relation_name: ObjectName(vec![Ident::new("old_accounts")]), }, ], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("check_account_referencing")]), + name: ObjectName(vec![Ident::new("check_account_referencing")]), args: None, }, - }), - statements: None, + }, characteristics: None, }; @@ -5655,7 +4763,7 @@ fn parse_create_trigger_invalid_cases() { ), ( "CREATE TRIGGER check_update TOMORROW UPDATE ON accounts EXECUTE FUNCTION check_account_update", - "Expected: one of FOR or 
BEFORE or AFTER or INSTEAD, found: TOMORROW" + "Expected: one of BEFORE or AFTER or INSTEAD, found: TOMORROW" ), ( "CREATE TRIGGER check_update BEFORE SAVE ON accounts EXECUTE FUNCTION check_account_update", @@ -5684,15 +4792,15 @@ fn parse_drop_trigger() { "DROP TRIGGER{} check_update ON table_name{}", if if_exists { " IF EXISTS" } else { "" }, option - .map(|o| format!(" {o}")) + .map(|o| format!(" {}", o)) .unwrap_or_else(|| "".to_string()) ); assert_eq!( pg().verified_stmt(sql), Statement::DropTrigger { if_exists, - trigger_name: ObjectName::from(vec![Ident::new("check_update")]), - table_name: Some(ObjectName::from(vec![Ident::new("table_name")])), + trigger_name: ObjectName(vec![Ident::new("check_update")]), + table_name: ObjectName(vec![Ident::new("table_name")]), option } ); @@ -5778,7 +4886,8 @@ fn parse_trigger_related_functions() { // Now we parse the statements and check if they are parsed correctly. let mut statements = pg() .parse_sql_statements(&format!( - "{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}" + "{}{}{}{}", + sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger )) .unwrap(); @@ -5804,27 +4913,30 @@ fn parse_trigger_related_functions() { if_not_exists: false, transient: false, volatile: false, - iceberg: false, - name: ObjectName::from(vec![Ident::new("emp")]), + name: ObjectName(vec![Ident::new("emp")]), columns: vec![ ColumnDef { name: "empname".into(), data_type: DataType::Text, + collation: None, options: vec![], }, ColumnDef { name: "salary".into(), data_type: DataType::Integer(None), + collation: None, options: vec![], }, ColumnDef { name: "last_date".into(), data_type: DataType::Timestamp(None, TimezoneInfo::None), + collation: None, options: vec![], }, ColumnDef { name: "last_user".into(), data_type: DataType::Text, + collation: None, options: vec![], }, ], @@ -5836,13 +4948,19 @@ fn parse_trigger_related_functions() { storage: None, location: None }), + table_properties: vec![], + with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, + engine: None, comment: None, + auto_increment_offset: None, + default_charset: None, + collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -5850,7 +4968,7 @@ fn parse_trigger_related_functions() { partition_by: None, cluster_by: None, clustered_by: None, - inherits: None, + options: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -5861,12 +4979,6 @@ fn parse_trigger_related_functions() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, - base_location: None, - external_volume: None, - catalog: None, - catalog_sync: None, - storage_serialization_policy: None, - table_options: CreateTableOptions::None } ); @@ -5874,17 +4986,16 @@ fn parse_trigger_related_functions() { assert_eq!( create_function, - Statement::CreateFunction(CreateFunction { - or_alter: false, + Statement::CreateFunction { or_replace: false, temporary: false, if_not_exists: false, - name: ObjectName::from(vec![Ident::new("emp_stamp")]), - args: Some(vec![]), + name: ObjectName(vec![Ident::new("emp_stamp")]), + args: None, return_type: Some(DataType::Trigger), function_body: Some( CreateFunctionBody::AsBeforeOptions( - Expr::Value(( + Expr::Value( Value::DollarQuotedString( DollarQuotedString { value: "\n BEGIN\n -- Check that empname and salary are given\n IF NEW.empname IS NULL THEN\n RAISE EXCEPTION 'empname cannot be null';\n END IF;\n IF NEW.salary IS NULL 
THEN\n RAISE EXCEPTION '% cannot have null salary', NEW.empname;\n END IF;\n\n -- Who works for us when they must pay for it?\n IF NEW.salary < 0 THEN\n RAISE EXCEPTION '% cannot have a negative salary', NEW.empname;\n END IF;\n\n -- Remember who changed the payroll when\n NEW.last_date := current_timestamp;\n NEW.last_user := current_user;\n RETURN NEW;\n END;\n ".to_owned(), @@ -5892,8 +5003,8 @@ fn parse_trigger_related_functions() { "emp_stamp".to_owned(), ), }, - ) - ).with_empty_span()), + ), + ), ), ), behavior: None, @@ -5905,33 +5016,31 @@ fn parse_trigger_related_functions() { options: None, remote_connection: None } - )); + ); // Check the third statement assert_eq!( create_trigger, Statement::CreateTrigger { - or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName::from(vec![Ident::new("emp_stamp")]), + name: ObjectName(vec![Ident::new("emp_stamp")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert, TriggerEvent::Update(vec![])], - table_name: ObjectName::from(vec![Ident::new("emp")]), + table_name: ObjectName(vec![Ident::new("emp")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: Some(TriggerExecBody { + exec_body: TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName::from(vec![Ident::new("emp_stamp")]), + name: ObjectName(vec![Ident::new("emp_stamp")]), args: None, } - }), - statements: None, + }, characteristics: None } ); @@ -5941,8 +5050,8 @@ fn parse_trigger_related_functions() { drop_trigger, Statement::DropTrigger { if_exists: false, - trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), - table_name: Some(ObjectName::from(vec![Ident::new("emp")])), + trigger_name: ObjectName(vec![Ident::new("emp_stamp")]), + table_name: ObjectName(vec![Ident::new("emp")]), option: None } ); @@ -5962,10 +5071,7 @@ fn test_unicode_string_literal() { ]; for (input, expected) in pairs { match pg_and_generic().verified_expr(input) { - Expr::Value(ValueWithSpan { - value: Value::UnicodeStringLiteral(s), - span: _, - }) => { + Expr::Value(Value::UnicodeStringLiteral(s)) => { assert_eq!(expected, s); } _ => unreachable!(), @@ -5981,17 +5087,12 @@ fn check_arrow_precedence(sql: &str, arrow_operator: BinaryOperator) { left: Box::new(Expr::Identifier(Ident { value: "foo".to_string(), quote_style: None, - span: Span::empty(), })), op: arrow_operator, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("bar".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))), }), op: BinaryOperator::Eq, - right: Box::new(Expr::Value( - (Value::SingleQuotedString("spam".to_string())).with_empty_span() - )), + right: Box::new(Expr::Value(Value::SingleQuotedString("spam".to_string()))), } ) } @@ -6016,14 +5117,11 @@ fn arrow_cast_precedence() { left: Box::new(Expr::Identifier(Ident { value: "foo".to_string(), quote_style: None, - span: Span::empty(), })), op: BinaryOperator::Arrow, right: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("bar".to_string())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))), data_type: DataType::Text, format: None, }), @@ -6033,8 +5131,15 @@ fn arrow_cast_precedence() { #[test] fn parse_create_type_as_enum() { - let sql = "CREATE TYPE public.my_type AS ENUM ('label1', 'label2', 'label3', 'label4')"; - let statement = 
pg_and_generic().verified_stmt(sql); + let statement = pg().one_statement_parses_to( + r#"CREATE TYPE public.my_type AS ENUM ( + 'label1', + 'label2', + 'label3', + 'label4' + );"#, + "CREATE TYPE public.my_type AS ENUM ('label1', 'label2', 'label3', 'label4')", + ); match statement { Statement::CreateType { name, @@ -6049,309 +5154,6 @@ fn parse_create_type_as_enum() { labels ); } - _ => unreachable!("{:?} should parse to Statement::CreateType", sql), - } -} - -#[test] -fn parse_alter_type() { - struct TestCase { - sql: &'static str, - name: &'static str, - operation: AlterTypeOperation, - } - vec![ - TestCase { - sql: "ALTER TYPE public.my_type RENAME TO my_new_type", - name: "public.my_type", - operation: AlterTypeOperation::Rename(AlterTypeRename { - new_name: Ident::new("my_new_type"), - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label3.5' BEFORE 'label4'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: true, - value: Ident::with_quote('\'', "label3.5"), - position: Some(AlterTypeAddValuePosition::Before(Ident::with_quote( - '\'', "label4", - ))), - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE 'label3.5' BEFORE 'label4'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: false, - value: Ident::with_quote('\'', "label3.5"), - position: Some(AlterTypeAddValuePosition::Before(Ident::with_quote( - '\'', "label4", - ))), - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label3.5' AFTER 'label3'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: true, - value: Ident::with_quote('\'', "label3.5"), - position: Some(AlterTypeAddValuePosition::After(Ident::with_quote( - '\'', "label3", - ))), - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE 'label3.5' AFTER 'label3'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: false, - value: Ident::with_quote('\'', "label3.5"), - position: Some(AlterTypeAddValuePosition::After(Ident::with_quote( - '\'', "label3", - ))), - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label5'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: true, - value: Ident::with_quote('\'', "label5"), - position: None, - }), - }, - TestCase { - sql: "ALTER TYPE public.my_type ADD VALUE 'label5'", - name: "public.my_type", - operation: AlterTypeOperation::AddValue(AlterTypeAddValue { - if_not_exists: false, - value: Ident::with_quote('\'', "label5"), - position: None, - }), - }, - ] - .into_iter() - .enumerate() - .for_each(|(index, tc)| { - let statement = pg_and_generic().verified_stmt(tc.sql); - if let Statement::AlterType(AlterType { name, operation }) = statement { - assert_eq!(tc.name, name.to_string(), "TestCase[{index}].name"); - assert_eq!(tc.operation, operation, "TestCase[{index}].operation"); - } else { - unreachable!("{:?} should parse to Statement::AlterType", tc.sql); - } - }); -} - -#[test] -fn parse_bitstring_literal() { - let select = pg_and_generic().verified_only_select("SELECT B'111'"); - assert_eq!( - select.projection, - vec![SelectItem::UnnamedExpr(Expr::Value( - (Value::SingleQuotedByteStringLiteral("111".to_string())).with_empty_span() - ))] - ); -} - -#[test] -fn parse_varbit_datatype() { - match pg_and_generic().verified_stmt("CREATE 
TABLE foo (x VARBIT, y VARBIT(42))") { - Statement::CreateTable(CreateTable { columns, .. }) => { - assert_eq!( - columns, - vec![ - ColumnDef { - name: "x".into(), - data_type: DataType::VarBit(None), - options: vec![], - }, - ColumnDef { - name: "y".into(), - data_type: DataType::VarBit(Some(42)), - options: vec![], - } - ] - ); - } _ => unreachable!(), } } - -#[test] -fn parse_alter_table_replica_identity() { - match pg_and_generic().verified_stmt("ALTER TABLE foo REPLICA IDENTITY FULL") { - Statement::AlterTable { operations, .. } => { - assert_eq!( - operations, - vec![AlterTableOperation::ReplicaIdentity { - identity: ReplicaIdentity::Full - }] - ); - } - _ => unreachable!(), - } - - match pg_and_generic().verified_stmt("ALTER TABLE foo REPLICA IDENTITY USING INDEX foo_idx") { - Statement::AlterTable { operations, .. } => { - assert_eq!( - operations, - vec![AlterTableOperation::ReplicaIdentity { - identity: ReplicaIdentity::Index("foo_idx".into()) - }] - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_ts_datatypes() { - match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") { - Statement::CreateTable(CreateTable { columns, .. }) => { - assert_eq!( - columns, - vec![ColumnDef { - name: "x".into(), - data_type: DataType::TsVector, - options: vec![], - }] - ); - } - _ => unreachable!(), - } - - match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") { - Statement::CreateTable(CreateTable { columns, .. }) => { - assert_eq!( - columns, - vec![ColumnDef { - name: "x".into(), - data_type: DataType::TsQuery, - options: vec![], - }] - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_alter_table_constraint_not_valid() { - match pg_and_generic().verified_stmt( - "ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID", - ) { - Statement::AlterTable { operations, .. } => { - assert_eq!( - operations, - vec![AlterTableOperation::AddConstraint { - constraint: TableConstraint::ForeignKey { - name: Some("bar".into()), - index_name: None, - columns: vec!["baz".into()], - foreign_table: ObjectName::from(vec!["other".into()]), - referred_columns: vec!["ref".into()], - on_delete: None, - on_update: None, - characteristics: None, - }, - not_valid: true, - }] - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_alter_table_validate_constraint() { - match pg_and_generic().verified_stmt("ALTER TABLE foo VALIDATE CONSTRAINT bar") { - Statement::AlterTable { operations, .. 
} => { - assert_eq!( - operations, - vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }] - ); - } - _ => unreachable!(), - } -} - -#[test] -fn parse_create_server() { - let test_cases = vec![ - ( - "CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw", - CreateServerStatement { - name: ObjectName::from(vec!["myserver".into()]), - if_not_exists: false, - server_type: None, - version: None, - foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]), - options: None, - }, - ), - ( - "CREATE SERVER IF NOT EXISTS myserver TYPE 'server_type' VERSION 'server_version' FOREIGN DATA WRAPPER postgres_fdw", - CreateServerStatement { - name: ObjectName::from(vec!["myserver".into()]), - if_not_exists: true, - server_type: Some(Ident { - value: "server_type".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }), - version: Some(Ident { - value: "server_version".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }), - foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]), - options: None, - } - ), - ( - "CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')", - CreateServerStatement { - name: ObjectName::from(vec!["myserver2".into()]), - if_not_exists: false, - server_type: None, - version: None, - foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]), - options: Some(vec![ - CreateServerOption { - key: "host".into(), - value: Ident { - value: "foo".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }, - }, - CreateServerOption { - key: "dbname".into(), - value: Ident { - value: "foodb".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }, - }, - CreateServerOption { - key: "port".into(), - value: Ident { - value: "5432".to_string(), - quote_style: Some('\''), - span: Span::empty(), - }, - }, - ]), - } - ) - ]; - - for (sql, expected) in test_cases { - let Statement::CreateServer(stmt) = pg_and_generic().verified_stmt(sql) else { - unreachable!() - }; - assert_eq!(stmt, expected); - } -} diff --git a/tests/sqlparser_redshift.rs b/tests/sqlparser_redshift.rs index d539adf6..a25d5060 100644 --- a/tests/sqlparser_redshift.rs +++ b/tests/sqlparser_redshift.rs @@ -18,7 +18,6 @@ #[macro_use] mod test_utils; -use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; @@ -32,25 +31,30 @@ fn test_square_brackets_over_db_schema_table_name() { select.projection[0], SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "col1".to_string(), - quote_style: Some('['), - span: Span::empty(), + quote_style: Some('[') })), ); assert_eq!( select.from[0], TableWithJoins { - relation: table_from_name(ObjectName::from(vec![ - Ident { - value: "test_schema".to_string(), - quote_style: Some('['), - span: Span::empty(), - }, - Ident { - value: "test_table".to_string(), - quote_style: Some('['), - span: Span::empty(), - } - ])), + relation: TableFactor::Table { + name: ObjectName(vec![ + Ident { + value: "test_schema".to_string(), + quote_style: Some('[') + }, + Ident { + value: "test_table".to_string(), + quote_style: Some('[') + } + ]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], } ); @@ -74,25 +78,30 @@ fn test_double_quotes_over_db_schema_table_name() { select.projection[0], SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "col1".to_string(), - quote_style: Some('"'), - span: Span::empty(), + quote_style: Some('"') })), ); assert_eq!( 
select.from[0], TableWithJoins { - relation: table_from_name(ObjectName::from(vec![ - Ident { - value: "test_schema".to_string(), - quote_style: Some('"'), - span: Span::empty(), - }, - Ident { - value: "test_table".to_string(), - quote_style: Some('"'), - span: Span::empty(), - } - ])), + relation: TableFactor::Table { + name: ObjectName(vec![ + Ident { + value: "test_schema".to_string(), + quote_style: Some('"') + }, + Ident { + value: "test_table".to_string(), + quote_style: Some('"') + } + ]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], } ); @@ -112,12 +121,10 @@ fn parse_delimited_identifiers() { args, with_hints, version, - .. + with_ordinality: _, + partitions: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -136,8 +143,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -160,8 +166,6 @@ fn parse_delimited_identifiers() { } redshift().verified_stmt(r#"CREATE TABLE "foo" ("bar" "int")"#); - // An alias starting with a number - redshift().verified_stmt(r#"CREATE TABLE "foo" ("1" INT)"#); redshift().verified_stmt(r#"ALTER TABLE foo ADD CONSTRAINT "bar" PRIMARY KEY (baz)"#); //TODO verified_stmt(r#"UPDATE foo SET "bar" = 5"#); } @@ -192,218 +196,3 @@ fn test_create_view_with_no_schema_binding() { redshift_and_generic() .verified_stmt("CREATE VIEW myevent AS SELECT eventname FROM event WITH NO SCHEMA BINDING"); } - -#[test] -fn test_redshift_json_path() { - let dialects = all_dialects_where(|d| d.supports_partiql()); - let sql = "SELECT cust.c_orders[0].o_orderkey FROM customer_orders_lineitem"; - let select = dialects.verified_only_select(sql); - - assert_eq!( - &Expr::JsonAccess { - value: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("cust"), - Ident::new("c_orders") - ])), - path: JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - JsonPathElem::Dot { - key: "o_orderkey".to_string(), - quoted: false - } - ] - } - }, - expr_from_projection(only(&select.projection)) - ); - - let sql = "SELECT cust.c_orders[0]['id'] FROM customer_orders_lineitem"; - let select = dialects.verified_only_select(sql); - assert_eq!( - &Expr::JsonAccess { - value: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("cust"), - Ident::new("c_orders") - ])), - path: JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - JsonPathElem::Bracket { - key: Expr::Value( - (Value::SingleQuotedString("id".to_owned())).with_empty_span() - ) - } - ] - } - }, - expr_from_projection(only(&select.projection)) - ); - - let sql = "SELECT db1.sc1.tbl1.col1[0]['id'] FROM customer_orders_lineitem"; - let select = dialects.verified_only_select(sql); - assert_eq!( - &Expr::JsonAccess { - value: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("db1"), - Ident::new("sc1"), - Ident::new("tbl1"), - Ident::new("col1") - ])), - path: JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - 
JsonPathElem::Bracket { - key: Expr::Value( - (Value::SingleQuotedString("id".to_owned())).with_empty_span() - ) - } - ] - } - }, - expr_from_projection(only(&select.projection)) - ); - - let sql = r#"SELECT db1.sc1.tbl1.col1[0]."id" FROM customer_orders_lineitem"#; - let select = dialects.verified_only_select(sql); - assert_eq!( - &Expr::JsonAccess { - value: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("db1"), - Ident::new("sc1"), - Ident::new("tbl1"), - Ident::new("col1") - ])), - path: JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - JsonPathElem::Dot { - key: "id".to_string(), - quoted: true, - } - ] - } - }, - expr_from_projection(only(&select.projection)) - ); -} - -#[test] -fn test_parse_json_path_from() { - let dialects = all_dialects_where(|d| d.supports_partiql()); - let select = dialects.verified_only_select("SELECT * FROM src[0].a AS a"); - match &select.from[0].relation { - TableFactor::Table { - name, json_path, .. - } => { - assert_eq!(name, &ObjectName::from(vec![Ident::new("src")])); - assert_eq!( - json_path, - &Some(JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - JsonPathElem::Dot { - key: "a".to_string(), - quoted: false - } - ] - }) - ); - } - _ => panic!(), - } - - let select = dialects.verified_only_select("SELECT * FROM src[0].a[1].b AS a"); - match &select.from[0].relation { - TableFactor::Table { - name, json_path, .. - } => { - assert_eq!(name, &ObjectName::from(vec![Ident::new("src")])); - assert_eq!( - json_path, - &Some(JsonPath { - path: vec![ - JsonPathElem::Bracket { - key: Expr::value(number("0")) - }, - JsonPathElem::Dot { - key: "a".to_string(), - quoted: false - }, - JsonPathElem::Bracket { - key: Expr::Value( - (Value::Number("1".parse().unwrap(), false)).with_empty_span() - ) - }, - JsonPathElem::Dot { - key: "b".to_string(), - quoted: false - }, - ] - }) - ); - } - _ => panic!(), - } - - let select = dialects.verified_only_select("SELECT * FROM src.a.b"); - match &select.from[0].relation { - TableFactor::Table { - name, json_path, .. 
- } => { - assert_eq!( - name, - &ObjectName::from(vec![Ident::new("src"), Ident::new("a"), Ident::new("b")]) - ); - assert_eq!(json_path, &None); - } - _ => panic!(), - } -} - -#[test] -fn test_parse_select_numbered_columns() { - // An alias starting with a number - redshift_and_generic().verified_stmt(r#"SELECT 1 AS "1" FROM a"#); - redshift_and_generic().verified_stmt(r#"SELECT 1 AS "1abc" FROM a"#); -} - -#[test] -fn test_parse_nested_quoted_identifier() { - redshift().verified_stmt(r#"SELECT 1 AS ["1"] FROM a"#); - redshift().verified_stmt(r#"SELECT 1 AS ["[="] FROM a"#); - redshift().verified_stmt(r#"SELECT 1 AS ["=]"] FROM a"#); - redshift().verified_stmt(r#"SELECT 1 AS ["a[b]"] FROM a"#); - // trim spaces - redshift().one_statement_parses_to(r#"SELECT 1 AS [ " 1 " ]"#, r#"SELECT 1 AS [" 1 "]"#); - // invalid query - assert!(redshift() - .parse_sql_statements(r#"SELECT 1 AS ["1]"#) - .is_err()); -} - -#[test] -fn parse_extract_single_quotes() { - let sql = "SELECT EXTRACT('month' FROM my_timestamp) FROM my_table"; - redshift().verified_stmt(sql); -} - -#[test] -fn parse_string_literal_backslash_escape() { - redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'"); -} - -#[test] -fn parse_utf8_multibyte_idents() { - redshift().verified_stmt("SELECT 🚀.city AS 🎸 FROM customers AS 🚀"); -} diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs index 65546bee..c17c7b95 100644 --- a/tests/sqlparser_snowflake.rs +++ b/tests/sqlparser_snowflake.rs @@ -19,8 +19,9 @@ //! Test SQL syntax specific to Snowflake. The parser based on the //! generic dialect is also tested (on the inputs it can handle). -use sqlparser::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType}; -use sqlparser::ast::helpers::stmt_data_loading::{StageLoadSelectItem, StageLoadSelectItemKind}; +use sqlparser::ast::helpers::stmt_data_loading::{ + DataLoadingOption, DataLoadingOptionType, StageLoadSelectItem, +}; use sqlparser::ast::*; use sqlparser::dialect::{Dialect, GenericDialect, SnowflakeDialect}; use sqlparser::parser::{ParserError, ParserOptions}; @@ -270,8 +271,8 @@ fn test_snowflake_create_table_with_tag() { assert_eq!("my_table", name.to_string()); assert_eq!( Some(vec![ - Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()), - Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string()) + Tag::new("A".into(), "TAG A".to_string()), + Tag::new("B".into(), "TAG B".to_string()) ]), with_tags ); @@ -291,8 +292,8 @@ fn test_snowflake_create_table_with_tag() { assert_eq!("my_table", name.to_string()); assert_eq!( Some(vec![ - Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()), - Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string()) + Tag::new("A".into(), "TAG A".to_string()), + Tag::new("B".into(), "TAG B".to_string()) ]), with_tags ); @@ -345,6 +346,7 @@ fn test_snowflake_create_table_column_comment() { name: None, option: ColumnOption::Comment("some comment".to_string()) }], + collation: None }], columns ) @@ -353,15 +355,6 @@ fn test_snowflake_create_table_column_comment() { } } -#[test] -fn test_snowflake_create_table_on_commit() { - snowflake().verified_stmt( - r#"CREATE LOCAL TEMPORARY TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT PRESERVE ROWS"#, - ); - snowflake().verified_stmt(r#"CREATE TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT DELETE ROWS"#); - snowflake().verified_stmt(r#"CREATE TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT DROP"#); -} - #[test] fn test_snowflake_create_local_table() { match 
snowflake().verified_stmt("CREATE TABLE my_table (a INT)") { @@ -446,56 +439,19 @@ fn test_snowflake_create_table_if_not_exists() { } _ => unreachable!(), } - - for (sql, parse_to) in [ - ( - r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - ), - ( - r#"CREATE TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#, - r#"CREATE TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - ), - ( - r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - ), - ( - r#"CREATE TRANSIENT TABLE "A"."B"."C" IF NOT EXISTS (v VARIANT)"#, - r#"CREATE TRANSIENT TABLE IF NOT EXISTS "A"."B"."C" (v VARIANT)"#, - ), - ] { - snowflake().one_statement_parses_to(sql, parse_to); - } } #[test] fn test_snowflake_create_table_cluster_by() { - match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b, my_func(c))") { + match snowflake().verified_stmt("CREATE TABLE my_table (a INT) CLUSTER BY (a, b)") { Statement::CreateTable(CreateTable { name, cluster_by, .. }) => { assert_eq!("my_table", name.to_string()); assert_eq!( Some(WrappedCollection::Parentheses(vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")), - Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("my_func")]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( - Expr::Identifier(Ident::new("c")) - ))], - duplicate_treatment: None, - clauses: vec![], - }), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - }), + Ident::new("a"), + Ident::new("b"), ])), cluster_by ) @@ -507,22 +463,9 @@ fn test_snowflake_create_table_cluster_by() { #[test] fn test_snowflake_create_table_comment() { match snowflake().verified_stmt("CREATE TABLE my_table (a INT) COMMENT = 'some comment'") { - Statement::CreateTable(CreateTable { - name, - table_options, - .. - }) => { + Statement::CreateTable(CreateTable { name, comment, .. 
}) => { assert_eq!("my_table", name.to_string()); - let plain_options = match table_options { - CreateTableOptions::Plain(options) => options, - _ => unreachable!(), - }; - let comment = match plain_options.first().unwrap() { - SqlOption::Comment(CommentDef::WithEq(c)) - | SqlOption::Comment(CommentDef::WithoutEq(c)) => c, - _ => unreachable!(), - }; - assert_eq!("some comment", comment); + assert_eq!("some comment", comment.unwrap().to_string()); } _ => unreachable!(), } @@ -601,6 +544,7 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Autoincrement( @@ -614,14 +558,15 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "b".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Autoincrement( IdentityProperty { parameters: Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { - seed: Expr::value(number("100")), - increment: Expr::value(number("1")), + seed: Expr::Value(number("100")), + increment: Expr::Value(number("1")), } )), order: Some(IdentityPropertyOrder::NoOrder), @@ -632,6 +577,7 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "c".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Identity( @@ -645,6 +591,7 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "d".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Identity( @@ -652,12 +599,8 @@ fn test_snowflake_create_table_with_autoincrement_columns() { parameters: Some( IdentityPropertyFormatKind::StartAndIncrement( IdentityParameters { - seed: Expr::Value( - (number("100")).with_empty_span() - ), - increment: Expr::Value( - (number("1")).with_empty_span() - ), + seed: Expr::Value(number("100")), + increment: Expr::Value(number("1")), } ) ), @@ -682,12 +625,8 @@ fn test_snowflake_create_table_with_collated_column() { vec![ColumnDef { name: "a".into(), data_type: DataType::Text, - options: vec![ColumnOptionDef { - name: None, - option: ColumnOption::Collation(ObjectName::from(vec![Ident::with_quote( - '\'', "de_DE" - )])), - }] + collation: Some(ObjectName(vec![Ident::with_quote('\'', "de_DE")])), + options: vec![] },] ); } @@ -726,12 +665,13 @@ fn test_snowflake_create_table_with_columns_masking_policy() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy( ColumnPolicyProperty { with, - policy_name: ObjectName::from(vec![Ident::new("p")]), + policy_name: "p".into(), using_columns, } )) @@ -760,12 +700,13 @@ fn test_snowflake_create_table_with_columns_projection_policy() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy( ColumnPolicyProperty { with, - policy_name: ObjectName::from(vec![Ident::new("p")]), + policy_name: "p".into(), using_columns: None, } )) @@ -797,19 +738,14 @@ fn test_snowflake_create_table_with_columns_tags() { vec![ColumnDef { 
name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Tags(TagsColumnOption { with, tags: vec![ - Tag::new( - ObjectName::from(vec![Ident::new("A")]), - "TAG A".into() - ), - Tag::new( - ObjectName::from(vec![Ident::new("B")]), - "TAG B".into() - ), + Tag::new("A".into(), "TAG A".into()), + Tag::new("B".into(), "TAG B".into()), ] }), }], @@ -837,6 +773,7 @@ fn test_snowflake_create_table_with_several_column_options() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -852,7 +789,7 @@ fn test_snowflake_create_table_with_several_column_options() { option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy( ColumnPolicyProperty { with: true, - policy_name: ObjectName::from(vec![Ident::new("p1")]), + policy_name: "p1".into(), using_columns: Some(vec!["a".into(), "b".into()]), } )), @@ -862,14 +799,8 @@ fn test_snowflake_create_table_with_several_column_options() { option: ColumnOption::Tags(TagsColumnOption { with: true, tags: vec![ - Tag::new( - ObjectName::from(vec![Ident::new("A")]), - "TAG A".into() - ), - Tag::new( - ObjectName::from(vec![Ident::new("B")]), - "TAG B".into() - ), + Tag::new("A".into(), "TAG A".into()), + Tag::new("B".into(), "TAG B".into()), ] }), } @@ -878,19 +809,14 @@ fn test_snowflake_create_table_with_several_column_options() { ColumnDef { name: "b".into(), data_type: DataType::Text, + collation: Some(ObjectName(vec![Ident::with_quote('\'', "de_DE")])), options: vec![ - ColumnOptionDef { - name: None, - option: ColumnOption::Collation(ObjectName::from(vec![ - Ident::with_quote('\'', "de_DE") - ])), - }, ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy( ColumnPolicyProperty { with: false, - policy_name: ObjectName::from(vec![Ident::new("p2")]), + policy_name: "p2".into(), using_columns: None, } )), @@ -900,14 +826,8 @@ fn test_snowflake_create_table_with_several_column_options() { option: ColumnOption::Tags(TagsColumnOption { with: false, tags: vec![ - Tag::new( - ObjectName::from(vec![Ident::new("C")]), - "TAG C".into() - ), - Tag::new( - ObjectName::from(vec![Ident::new("D")]), - "TAG D".into() - ), + Tag::new("C".into(), "TAG C".into()), + Tag::new("D".into(), "TAG D".into()), ] }), } @@ -920,81 +840,6 @@ fn test_snowflake_create_table_with_several_column_options() { } } -#[test] -fn test_snowflake_create_iceberg_table_all_options() { - match snowflake().verified_stmt("CREATE ICEBERG TABLE my_table (a INT, b INT) \ - CLUSTER BY (a, b) EXTERNAL_VOLUME = 'volume' CATALOG = 'SNOWFLAKE' BASE_LOCATION = 'relative/path' CATALOG_SYNC = 'OPEN_CATALOG' \ - STORAGE_SERIALIZATION_POLICY = COMPATIBLE COPY GRANTS CHANGE_TRACKING=TRUE DATA_RETENTION_TIME_IN_DAYS=5 MAX_DATA_EXTENSION_TIME_IN_DAYS=10 \ - WITH AGGREGATION POLICY policy_name WITH ROW ACCESS POLICY policy_name ON (a) WITH TAG (A='TAG A', B='TAG B')") { - Statement::CreateTable(CreateTable { - name, cluster_by, base_location, - external_volume, catalog, catalog_sync, - storage_serialization_policy, change_tracking, - copy_grants, data_retention_time_in_days, - max_data_extension_time_in_days, with_aggregation_policy, - with_row_access_policy, with_tags, .. 
- }) => { - assert_eq!("my_table", name.to_string()); - assert_eq!( - Some(WrappedCollection::Parentheses(vec![ - Expr::Identifier(Ident::new("a")), - Expr::Identifier(Ident::new("b")), - ])), - cluster_by - ); - assert_eq!("relative/path", base_location.unwrap()); - assert_eq!("volume", external_volume.unwrap()); - assert_eq!("SNOWFLAKE", catalog.unwrap()); - assert_eq!("OPEN_CATALOG", catalog_sync.unwrap()); - assert_eq!(StorageSerializationPolicy::Compatible, storage_serialization_policy.unwrap()); - assert!(change_tracking.unwrap()); - assert!(copy_grants); - assert_eq!(Some(5), data_retention_time_in_days); - assert_eq!(Some(10), max_data_extension_time_in_days); - assert_eq!( - Some("WITH ROW ACCESS POLICY policy_name ON (a)".to_string()), - with_row_access_policy.map(|policy| policy.to_string()) - ); - assert_eq!( - Some("policy_name".to_string()), - with_aggregation_policy.map(|name| name.to_string()) - ); - assert_eq!(Some(vec![ - Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".into()), - Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".into()), - ]), with_tags); - - } - _ => unreachable!(), - } -} - -#[test] -fn test_snowflake_create_iceberg_table() { - match snowflake() - .verified_stmt("CREATE ICEBERG TABLE my_table (a INT) BASE_LOCATION = 'relative_path'") - { - Statement::CreateTable(CreateTable { - name, - base_location, - .. - }) => { - assert_eq!("my_table", name.to_string()); - assert_eq!("relative_path", base_location.unwrap()); - } - _ => unreachable!(), - } -} - -#[test] -fn test_snowflake_create_iceberg_table_without_location() { - let res = snowflake().parse_sql_statements("CREATE ICEBERG TABLE my_table (a INT)"); - assert_eq!( - ParserError::ParserError("BASE_LOCATION is required for ICEBERG tables".to_string()), - res.unwrap_err() - ); -} - #[test] fn parse_sf_create_or_replace_view_with_comment_missing_equal() { assert!(snowflake_and_generic() @@ -1044,21 +889,6 @@ fn parse_sf_create_or_replace_with_comment_for_snowflake() { } } -#[test] -fn parse_sf_create_table_or_view_with_dollar_quoted_comment() { - // Snowflake transforms dollar quoted comments into a common comment in DDL representation of creation - snowflake() - .one_statement_parses_to( - r#"CREATE OR REPLACE TEMPORARY VIEW foo.bar.baz ("COL_1" COMMENT $$comment 1$$) COMMENT = $$view comment$$ AS (SELECT 1)"#, - r#"CREATE OR REPLACE TEMPORARY VIEW foo.bar.baz ("COL_1" COMMENT 'comment 1') COMMENT = 'view comment' AS (SELECT 1)"# - ); - - snowflake().one_statement_parses_to( - r#"CREATE TABLE my_table (a STRING COMMENT $$comment 1$$) COMMENT = $$table comment$$"#, - r#"CREATE TABLE my_table (a STRING COMMENT 'comment 1') COMMENT = 'table comment'"#, - ); -} - #[test] fn test_sf_derived_table_in_parenthesis() { // Nesting a subquery in an extra set of parentheses is non-standard, @@ -1195,9 +1025,9 @@ fn parse_semi_structured_data_traversal() { path: JsonPath { path: vec![JsonPathElem::Bracket { key: Expr::BinaryOp { - left: Box::new(Expr::value(number("2"))), + left: Box::new(Expr::Value(number("2"))), op: BinaryOperator::Plus, - right: Box::new(Expr::value(number("2"))) + right: Box::new(Expr::Value(number("2"))) }, }] }, @@ -1275,7 +1105,7 @@ fn parse_semi_structured_data_traversal() { quoted: false, }, JsonPathElem::Bracket { - key: Expr::value(number("0")), + key: Expr::Value(number("0")), }, JsonPathElem::Dot { key: "bar".to_owned(), @@ -1297,7 +1127,7 @@ fn parse_semi_structured_data_traversal() { path: JsonPath { path: vec![ JsonPathElem::Bracket { - key: Expr::value(number("0")), 
+ key: Expr::Value(number("0")), }, JsonPathElem::Dot { key: "foo".to_owned(), @@ -1342,32 +1172,6 @@ fn parse_semi_structured_data_traversal() { .to_string(), "sql parser error: Expected: variant object key name, found: 42" ); - - // casting a json access and accessing an array element - assert_eq!( - snowflake().verified_expr("a:b::ARRAY[1]"), - Expr::JsonAccess { - value: Box::new(Expr::Cast { - kind: CastKind::DoubleColon, - data_type: DataType::Array(ArrayElemTypeDef::None), - format: None, - expr: Box::new(Expr::JsonAccess { - value: Box::new(Expr::Identifier(Ident::new("a"))), - path: JsonPath { - path: vec![JsonPathElem::Dot { - key: "b".to_string(), - quoted: false - }] - } - }) - }), - path: JsonPath { - path: vec![JsonPathElem::Bracket { - key: Expr::value(number("1")) - }] - } - } - ); } #[test] @@ -1384,12 +1188,10 @@ fn parse_delimited_identifiers() { args, with_hints, version, - .. + with_ordinality: _, + partitions: _, } => { - assert_eq!( - ObjectName::from(vec![Ident::with_quote('"', "a table")]), - name - ); + assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -1408,8 +1210,7 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::with_quote('"', "myfun")]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -1480,7 +1281,7 @@ fn test_select_wildcard_with_exclude() { let select = snowflake_and_generic() .verified_only_select("SELECT name.* EXCLUDE department_id FROM employee_table"); let expected = SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), + ObjectName(vec![Ident::new("name")]), WildcardAdditionalOptions { opt_exclude: Some(ExcludeSelectItem::Single(Ident::new("department_id"))), ..Default::default() @@ -1517,7 +1318,7 @@ fn test_select_wildcard_with_rename() { "SELECT name.* RENAME (department_id AS new_dep, employee_id AS new_emp) FROM employee_table", ); let expected = SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), + ObjectName(vec![Ident::new("name")]), WildcardAdditionalOptions { opt_rename: Some(RenameSelectItem::Multiple(vec![ IdentWithAlias { @@ -1609,50 +1410,6 @@ fn test_alter_table_swap_with() { }; } -#[test] -fn test_alter_table_clustering() { - let sql = r#"ALTER TABLE tab CLUSTER BY (c1, "c2", TO_DATE(c3))"#; - match alter_table_op(snowflake_and_generic().verified_stmt(sql)) { - AlterTableOperation::ClusterBy { exprs } => { - assert_eq!( - exprs, - [ - Expr::Identifier(Ident::new("c1")), - Expr::Identifier(Ident::with_quote('"', "c2")), - Expr::Function(Function { - name: ObjectName::from(vec![Ident::new("TO_DATE")]), - uses_odbc_syntax: false, - parameters: FunctionArguments::None, - args: FunctionArguments::List(FunctionArgumentList { - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( - Expr::Identifier(Ident::new("c3")) - ))], - duplicate_treatment: None, - clauses: vec![], - }), - filter: None, - null_treatment: None, - over: None, - within_group: vec![] - }) - ], - ); - } - _ => unreachable!(), - } - - snowflake_and_generic().verified_stmt("ALTER TABLE tbl DROP CLUSTERING KEY"); - snowflake_and_generic().verified_stmt("ALTER TABLE tbl SUSPEND 
RECLUSTER"); - snowflake_and_generic().verified_stmt("ALTER TABLE tbl RESUME RECLUSTER"); -} - -#[test] -fn test_alter_iceberg_table() { - snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl DROP CLUSTERING KEY"); - snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl SUSPEND RECLUSTER"); - snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl RESUME RECLUSTER"); -} - #[test] fn test_drop_stage() { match snowflake_and_generic().verified_stmt("DROP STAGE s1") { @@ -1755,13 +1512,13 @@ fn parse_snowflake_declare_result_set() { ( "DECLARE res RESULTSET DEFAULT 42", "res", - Some(DeclareAssignment::Default(Expr::value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), ), ( "DECLARE res RESULTSET := 42", "res", Some(DeclareAssignment::DuckAssignment( - Expr::value(number("42")).into(), + Expr::Value(number("42")).into(), )), ), ("DECLARE res RESULTSET", "res", None), @@ -1811,8 +1568,8 @@ fn parse_snowflake_declare_exception() { "ex", Some(DeclareAssignment::Expr( Expr::Tuple(vec![ - Expr::value(number("42")), - Expr::Value((Value::SingleQuotedString("ERROR".to_string())).with_empty_span()), + Expr::Value(number("42")), + Expr::Value(Value::SingleQuotedString("ERROR".to_string())), ]) .into(), )), @@ -1848,13 +1605,13 @@ fn parse_snowflake_declare_variable() { "DECLARE profit TEXT DEFAULT 42", "profit", Some(DataType::Text), - Some(DeclareAssignment::Default(Expr::value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), ), ( "DECLARE profit DEFAULT 42", "profit", None, - Some(DeclareAssignment::Default(Expr::value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), ), ("DECLARE profit TEXT", "profit", Some(DataType::Text), None), ("DECLARE profit", "profit", None, None), @@ -2007,26 +1764,38 @@ fn test_create_stage_with_stage_params() { "", stage_params.endpoint.unwrap() ); - assert!(stage_params.credentials.options.contains(&KeyValueOption { - option_name: "AWS_KEY_ID".to_string(), - option_type: KeyValueOptionType::STRING, - value: "1a2b3c".to_string() - })); - assert!(stage_params.credentials.options.contains(&KeyValueOption { - option_name: "AWS_SECRET_KEY".to_string(), - option_type: KeyValueOptionType::STRING, - value: "4x5y6z".to_string() - })); - assert!(stage_params.encryption.options.contains(&KeyValueOption { - option_name: "MASTER_KEY".to_string(), - option_type: KeyValueOptionType::STRING, - value: "key".to_string() - })); - assert!(stage_params.encryption.options.contains(&KeyValueOption { - option_name: "TYPE".to_string(), - option_type: KeyValueOptionType::STRING, - value: "AWS_SSE_KMS".to_string() - })); + assert!(stage_params + .credentials + .options + .contains(&DataLoadingOption { + option_name: "AWS_KEY_ID".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "1a2b3c".to_string() + })); + assert!(stage_params + .credentials + .options + .contains(&DataLoadingOption { + option_name: "AWS_SECRET_KEY".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "4x5y6z".to_string() + })); + assert!(stage_params + .encryption + .options + .contains(&DataLoadingOption { + option_name: "MASTER_KEY".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "key".to_string() + })); + assert!(stage_params + .encryption + .options + .contains(&DataLoadingOption { + option_name: "TYPE".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "AWS_SSE_KMS".to_string() + })); } _ => 
unreachable!(), }; @@ -2047,19 +1816,19 @@ fn test_create_stage_with_directory_table_params() { directory_table_params, .. } => { - assert!(directory_table_params.options.contains(&KeyValueOption { + assert!(directory_table_params.options.contains(&DataLoadingOption { option_name: "ENABLE".to_string(), - option_type: KeyValueOptionType::BOOLEAN, + option_type: DataLoadingOptionType::BOOLEAN, value: "TRUE".to_string() })); - assert!(directory_table_params.options.contains(&KeyValueOption { + assert!(directory_table_params.options.contains(&DataLoadingOption { option_name: "REFRESH_ON_CREATE".to_string(), - option_type: KeyValueOptionType::BOOLEAN, + option_type: DataLoadingOptionType::BOOLEAN, value: "FALSE".to_string() })); - assert!(directory_table_params.options.contains(&KeyValueOption { + assert!(directory_table_params.options.contains(&DataLoadingOption { option_name: "NOTIFICATION_INTEGRATION".to_string(), - option_type: KeyValueOptionType::STRING, + option_type: DataLoadingOptionType::STRING, value: "some-string".to_string() })); } @@ -2078,19 +1847,19 @@ fn test_create_stage_with_file_format() { match snowflake_without_unescape().verified_stmt(sql) { Statement::CreateStage { file_format, .. } => { - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "COMPRESSION".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "AUTO".to_string() })); - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "BINARY_FORMAT".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "HEX".to_string() })); - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "ESCAPE".to_string(), - option_type: KeyValueOptionType::STRING, + option_type: DataLoadingOptionType::STRING, value: r#"\\"#.to_string() })); } @@ -2111,14 +1880,14 @@ fn test_create_stage_with_copy_options() { ); match snowflake().verified_stmt(sql) { Statement::CreateStage { copy_options, .. } => { - assert!(copy_options.options.contains(&KeyValueOption { + assert!(copy_options.options.contains(&DataLoadingOption { option_name: "ON_ERROR".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "CONTINUE".to_string() })); - assert!(copy_options.options.contains(&KeyValueOption { + assert!(copy_options.options.contains(&DataLoadingOption { option_name: "FORCE".to_string(), - option_type: KeyValueOptionType::BOOLEAN, + option_type: DataLoadingOptionType::BOOLEAN, value: "TRUE".to_string() })); } @@ -2135,25 +1904,20 @@ fn test_copy_into() { ); match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - kind, into, - from_obj, + from_stage, files, pattern, validation_mode, .. 
} => { - assert_eq!(kind, CopyIntoSnowflakeKind::Table); assert_eq!( into, - ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]) + ObjectName(vec![Ident::new("my_company"), Ident::new("emp_basic")]) ); assert_eq!( - from_obj, - Some(ObjectName::from(vec![Ident::with_quote( - '\'', - "gcs://mybucket/./../a.csv" - )])) + from_stage, + ObjectName(vec![Ident::with_quote('\'', "gcs://mybucket/./../a.csv")]) ); assert!(files.is_none()); assert!(pattern.is_none()); @@ -2162,60 +1926,6 @@ fn test_copy_into() { _ => unreachable!(), }; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); - - let sql = concat!("COPY INTO 's3://a/b/c/data.parquet' ", "FROM db.sc.tbl ", "PARTITION BY ('date=' || to_varchar(dt, 'YYYY-MM-DD') || '/hour=' || to_varchar(date_part(hour, ts)))"); - match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { - kind, - into, - from_obj, - from_query, - partition, - .. - } => { - assert_eq!(kind, CopyIntoSnowflakeKind::Location); - assert_eq!( - into, - ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")]) - ); - assert_eq!( - from_obj, - Some(ObjectName::from(vec![ - Ident::new("db"), - Ident::new("sc"), - Ident::new("tbl") - ])) - ); - assert!(from_query.is_none()); - assert!(partition.is_some()); - } - _ => unreachable!(), - }; - assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); - - let sql = concat!( - "COPY INTO 's3://a/b/c/data.parquet' ", - "FROM (SELECT * FROM tbl)" - ); - match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { - kind, - into, - from_obj, - from_query, - .. - } => { - assert_eq!(kind, CopyIntoSnowflakeKind::Location); - assert_eq!( - into, - ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")]) - ); - assert!(from_query.is_some()); - assert!(from_obj.is_none()); - } - _ => unreachable!(), - }; - assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); } #[test] @@ -2231,43 +1941,52 @@ fn test_copy_into_with_stage_params() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_obj, + from_stage, stage_params, .. 
} => { //assert_eq!("s3://load/files/", stage_params.url.unwrap()); assert_eq!( - from_obj, - Some(ObjectName::from(vec![Ident::with_quote( - '\'', - "s3://load/files/" - )])) + from_stage, + ObjectName(vec![Ident::with_quote('\'', "s3://load/files/")]) ); assert_eq!("myint", stage_params.storage_integration.unwrap()); assert_eq!( "", stage_params.endpoint.unwrap() ); - assert!(stage_params.credentials.options.contains(&KeyValueOption { - option_name: "AWS_KEY_ID".to_string(), - option_type: KeyValueOptionType::STRING, - value: "1a2b3c".to_string() - })); - assert!(stage_params.credentials.options.contains(&KeyValueOption { - option_name: "AWS_SECRET_KEY".to_string(), - option_type: KeyValueOptionType::STRING, - value: "4x5y6z".to_string() - })); - assert!(stage_params.encryption.options.contains(&KeyValueOption { - option_name: "MASTER_KEY".to_string(), - option_type: KeyValueOptionType::STRING, - value: "key".to_string() - })); - assert!(stage_params.encryption.options.contains(&KeyValueOption { - option_name: "TYPE".to_string(), - option_type: KeyValueOptionType::STRING, - value: "AWS_SSE_KMS".to_string() - })); + assert!(stage_params + .credentials + .options + .contains(&DataLoadingOption { + option_name: "AWS_KEY_ID".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "1a2b3c".to_string() + })); + assert!(stage_params + .credentials + .options + .contains(&DataLoadingOption { + option_name: "AWS_SECRET_KEY".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "4x5y6z".to_string() + })); + assert!(stage_params + .encryption + .options + .contains(&DataLoadingOption { + option_name: "MASTER_KEY".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "key".to_string() + })); + assert!(stage_params + .encryption + .options + .contains(&DataLoadingOption { + option_name: "TYPE".to_string(), + option_type: DataLoadingOptionType::STRING, + value: "AWS_SSE_KMS".to_string() + })); } _ => unreachable!(), }; @@ -2282,16 +2001,13 @@ fn test_copy_into_with_stage_params() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_obj, + from_stage, stage_params, .. } => { assert_eq!( - from_obj, - Some(ObjectName::from(vec![Ident::with_quote( - '\'', - "s3://load/files/" - )])) + from_stage, + ObjectName(vec![Ident::with_quote('\'', "s3://load/files/")]) ); assert_eq!("myint", stage_params.storage_integration.unwrap()); } @@ -2314,13 +2030,13 @@ fn test_copy_into_with_files_and_pattern_and_verification() { files, pattern, validation_mode, - from_obj_alias, + from_stage_alias, .. 
} => { assert_eq!(files.unwrap(), vec!["file1.json", "file2.json"]); assert_eq!(pattern.unwrap(), ".*employees0[1-5].csv.gz"); assert_eq!(validation_mode.unwrap(), "RETURN_7_ROWS"); - assert_eq!(from_obj_alias.unwrap(), Ident::new("some_alias")); + assert_eq!(from_stage_alias.unwrap(), Ident::new("some_alias")); } _ => unreachable!(), } @@ -2331,7 +2047,7 @@ fn test_copy_into_with_files_and_pattern_and_verification() { fn test_copy_into_with_transformations() { let sql = concat!( "COPY INTO my_company.emp_basic FROM ", - "(SELECT t1.$1:st AS st, $1:index, t2.$1, 4, '5' AS const_str FROM @schema.general_finished AS T) ", + "(SELECT t1.$1:st AS st, $1:index, t2.$1 FROM @schema.general_finished AS T) ", "FILES = ('file1.json', 'file2.json') ", "PATTERN = '.*employees0[1-5].csv.gz' ", "VALIDATION_MODE = RETURN_7_ROWS" @@ -2339,68 +2055,45 @@ fn test_copy_into_with_transformations() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_obj, + from_stage, from_transformations, .. } => { assert_eq!( - from_obj, - Some(ObjectName::from(vec![ - Ident::new("@schema"), - Ident::new("general_finished") - ])) + from_stage, + ObjectName(vec![Ident::new("@schema"), Ident::new("general_finished")]) ); assert_eq!( from_transformations.as_ref().unwrap()[0], - StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { + StageLoadSelectItem { alias: Some(Ident::new("t1")), file_col_num: 1, element: Some(Ident::new("st")), item_as: Some(Ident::new("st")) - }) + } ); assert_eq!( from_transformations.as_ref().unwrap()[1], - StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { + StageLoadSelectItem { alias: None, file_col_num: 1, element: Some(Ident::new("index")), item_as: None - }) + } ); assert_eq!( from_transformations.as_ref().unwrap()[2], - StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { + StageLoadSelectItem { alias: Some(Ident::new("t2")), file_col_num: 1, element: None, item_as: None - }) - ); - assert_eq!( - from_transformations.as_ref().unwrap()[3], - StageLoadSelectItemKind::SelectItem(SelectItem::UnnamedExpr(Expr::Value( - Value::Number("4".parse().unwrap(), false).into() - ))) - ); - assert_eq!( - from_transformations.as_ref().unwrap()[4], - StageLoadSelectItemKind::SelectItem(SelectItem::ExprWithAlias { - expr: Expr::Value(Value::SingleQuotedString("5".parse().unwrap()).into()), - alias: Ident::new("const_str".to_string()) - }) + } ); } _ => unreachable!(), } assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); - - // Test optional AS keyword to denote an alias for the stage - let sql1 = concat!( - "COPY INTO my_company.emp_basic FROM ", - "(SELECT t1.$1:st AS st, $1:index, t2.$1, 4, '5' AS const_str FROM @schema.general_finished T) " - ); - snowflake().parse_sql_statements(sql1).unwrap(); } #[test] @@ -2415,19 +2108,19 @@ fn test_copy_into_file_format() { match snowflake_without_unescape().verified_stmt(sql) { Statement::CopyIntoSnowflake { file_format, .. 
} => { - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "COMPRESSION".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "AUTO".to_string() })); - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "BINARY_FORMAT".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "HEX".to_string() })); - assert!(file_format.options.contains(&KeyValueOption { + assert!(file_format.options.contains(&DataLoadingOption { option_name: "ESCAPE".to_string(), - option_type: KeyValueOptionType::STRING, + option_type: DataLoadingOptionType::STRING, value: r#"\\"#.to_string() })); } @@ -2437,41 +2130,6 @@ fn test_copy_into_file_format() { snowflake_without_unescape().verified_stmt(sql).to_string(), sql ); - - // Test commas in file format - let sql = concat!( - "COPY INTO my_company.emp_basic ", - "FROM 'gcs://mybucket/./../a.csv' ", - "FILES = ('file1.json', 'file2.json') ", - "PATTERN = '.*employees0[1-5].csv.gz' ", - r#"FILE_FORMAT=(COMPRESSION=AUTO, BINARY_FORMAT=HEX, ESCAPE='\\')"# - ); - - match snowflake_without_unescape() - .parse_sql_statements(sql) - .unwrap() - .first() - .unwrap() - { - Statement::CopyIntoSnowflake { file_format, .. } => { - assert!(file_format.options.contains(&KeyValueOption { - option_name: "COMPRESSION".to_string(), - option_type: KeyValueOptionType::ENUM, - value: "AUTO".to_string() - })); - assert!(file_format.options.contains(&KeyValueOption { - option_name: "BINARY_FORMAT".to_string(), - option_type: KeyValueOptionType::ENUM, - value: "HEX".to_string() - })); - assert!(file_format.options.contains(&KeyValueOption { - option_name: "ESCAPE".to_string(), - option_type: KeyValueOptionType::STRING, - value: r#"\\"#.to_string() - })); - } - _ => unreachable!(), - } } #[test] @@ -2486,14 +2144,14 @@ fn test_copy_into_copy_options() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { copy_options, .. 
} => { - assert!(copy_options.options.contains(&KeyValueOption { + assert!(copy_options.options.contains(&DataLoadingOption { option_name: "ON_ERROR".to_string(), - option_type: KeyValueOptionType::ENUM, + option_type: DataLoadingOptionType::ENUM, value: "CONTINUE".to_string() })); - assert!(copy_options.options.contains(&KeyValueOption { + assert!(copy_options.options.contains(&DataLoadingOption { option_name: "FORCE".to_string(), - option_type: KeyValueOptionType::BOOLEAN, + option_type: DataLoadingOptionType::BOOLEAN, value: "TRUE".to_string() })); } @@ -2503,55 +2161,37 @@ fn test_copy_into_copy_options() { } #[test] -fn test_snowflake_stage_object_names_into_location() { - let mut allowed_object_names = [ - ObjectName::from(vec![Ident::new("@namespace"), Ident::new("%table_name")]), - ObjectName::from(vec![ - Ident::new("@namespace"), - Ident::new("%table_name/path"), - ]), - ObjectName::from(vec![ - Ident::new("@namespace"), - Ident::new("stage_name/path"), - ]), - ObjectName::from(vec![Ident::new("@~/path")]), - ]; - - let allowed_names_into_location = [ +fn test_snowflake_stage_object_names() { + let allowed_formatted_names = [ + "my_company.emp_basic", "@namespace.%table_name", "@namespace.%table_name/path", "@namespace.stage_name/path", "@~/path", ]; - for it in allowed_names_into_location - .iter() - .zip(allowed_object_names.iter_mut()) - { - let (formatted_name, object_name) = it; - let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'"); - match snowflake().verified_stmt(&sql) { - Statement::CopyIntoSnowflake { into, .. } => { - assert_eq!(into.0, object_name.0) - } - _ => unreachable!(), - } - } -} - -#[test] -fn test_snowflake_stage_object_names_into_table() { let mut allowed_object_names = [ - ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]), - ObjectName::from(vec![Ident::new("emp_basic")]), + ObjectName(vec![Ident::new("my_company"), Ident::new("emp_basic")]), + ObjectName(vec![Ident::new("@namespace"), Ident::new("%table_name")]), + ObjectName(vec![ + Ident::new("@namespace"), + Ident::new("%table_name/path"), + ]), + ObjectName(vec![ + Ident::new("@namespace"), + Ident::new("stage_name/path"), + ]), + ObjectName(vec![Ident::new("@~/path")]), ]; - let allowed_names_into_table = ["my_company.emp_basic", "emp_basic"]; - for it in allowed_names_into_table + for it in allowed_formatted_names .iter() .zip(allowed_object_names.iter_mut()) { let (formatted_name, object_name) = it; - let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'"); + let sql = format!( + "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'", + formatted_name + ); match snowflake().verified_stmt(&sql) { Statement::CopyIntoSnowflake { into, .. } => { assert_eq!(into.0, object_name.0) @@ -2566,37 +2206,13 @@ fn test_snowflake_copy_into() { let sql = "COPY INTO a.b FROM @namespace.stage_name"; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { into, from_obj, .. } => { + Statement::CopyIntoSnowflake { + into, from_stage, .. 
+ } => { + assert_eq!(into, ObjectName(vec![Ident::new("a"), Ident::new("b")])); assert_eq!( - into, - ObjectName::from(vec![Ident::new("a"), Ident::new("b")]) - ); - assert_eq!( - from_obj, - Some(ObjectName::from(vec![ - Ident::new("@namespace"), - Ident::new("stage_name") - ])) - ) - } - _ => unreachable!(), - } - - // Test for non-ident characters in stage names - let sql = "COPY INTO a.b FROM @namespace.stage_name/x@x~x%x+"; - assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); - match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { into, from_obj, .. } => { - assert_eq!( - into, - ObjectName::from(vec![Ident::new("a"), Ident::new("b")]) - ); - assert_eq!( - from_obj, - Some(ObjectName::from(vec![ - Ident::new("@namespace"), - Ident::new("stage_name/x@x~x%x+") - ])) + from_stage, + ObjectName(vec![Ident::new("@namespace"), Ident::new("stage_name")]) ) } _ => unreachable!(), @@ -2608,20 +2224,19 @@ fn test_snowflake_copy_into_stage_name_ends_with_parens() { let sql = "COPY INTO SCHEMA.SOME_MONITORING_SYSTEM FROM (SELECT t.$1:st AS st FROM @schema.general_finished)"; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { into, from_obj, .. } => { + Statement::CopyIntoSnowflake { + into, from_stage, .. + } => { assert_eq!( into, - ObjectName::from(vec![ + ObjectName(vec![ Ident::new("SCHEMA"), Ident::new("SOME_MONITORING_SYSTEM") ]) ); assert_eq!( - from_obj, - Some(ObjectName::from(vec![ - Ident::new("@schema"), - Ident::new("general_finished") - ])) + from_stage, + ObjectName(vec![Ident::new("@schema"), Ident::new("general_finished")]) ) } _ => unreachable!(), @@ -2637,14 +2252,10 @@ fn test_snowflake_trim() { let select = snowflake().verified_only_select(sql_only_select); assert_eq!( &Expr::Trim { - expr: Box::new(Expr::Value( - (Value::SingleQuotedString("xyz".to_owned())).with_empty_span() - )), + expr: Box::new(Expr::Value(Value::SingleQuotedString("xyz".to_owned()))), trim_where: None, trim_what: None, - trim_characters: Some(vec![Expr::Value( - (Value::SingleQuotedString("a".to_owned())).with_empty_span() - )]), + trim_characters: Some(vec![Expr::Value(Value::SingleQuotedString("a".to_owned()))]), }, expr_from_projection(only(&select.projection)) ); @@ -2662,7 +2273,7 @@ fn test_number_placeholder() { let sql_only_select = "SELECT :1"; let select = snowflake().verified_only_select(sql_only_select); assert_eq!( - &Expr::Value((Value::Placeholder(":1".into())).with_empty_span()), + &Expr::Value(Value::Placeholder(":1".into())), expr_from_projection(only(&select.projection)) ); @@ -2808,7 +2419,7 @@ fn parse_comma_outer_join() { "myudf", [Expr::UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::value(number("42"))) + expr: Box::new(Expr::Value(number("42"))) }] )), }) @@ -2993,20 +2604,6 @@ fn asof_joins() { "ON s.state = p.state ", "ORDER BY s.observed", )); - - // Test without explicit aliases - #[rustfmt::skip] - snowflake_and_generic().verified_query(concat!( - "SELECT * ", - "FROM snowtime ", - "ASOF JOIN raintime ", - "MATCH_CONDITION (snowtime.observed >= raintime.observed) ", - "ON snowtime.state = raintime.state ", - "ASOF JOIN preciptime ", - "MATCH_CONDITION (showtime.observed >= preciptime.observed) ", - "ON showtime.state = preciptime.state ", - "ORDER BY showtime.observed", - )); } #[test] @@ -3051,17 +2648,17 @@ fn parse_use() { let quote_styles = ['\'', '"', '`']; for object_name in &valid_object_names { // Test single identifier without quotes - 
assert_eq!( - snowflake().verified_stmt(&format!("USE {object_name}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( + std::assert_eq!( + snowflake().verified_stmt(&format!("USE {}", object_name)), + Statement::Use(Use::Object(ObjectName(vec![Ident::new( object_name.to_string() )]))) ); for &quote in &quote_styles { // Test single identifier with different type of quotes - assert_eq!( - snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")), - Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( + std::assert_eq!( + snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), + Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -3071,20 +2668,18 @@ fn parse_use() { for &quote in &quote_styles { // Test double identifier with different type of quotes - assert_eq!( - snowflake().verified_stmt(&format!( - "USE {quote}CATALOG{quote}.{quote}my_schema{quote}" - )), - Statement::Use(Use::Object(ObjectName::from(vec![ + std::assert_eq!( + snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)), + Statement::Use(Use::Object(ObjectName(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) ); } // Test double identifier without quotes - assert_eq!( + std::assert_eq!( snowflake().verified_stmt("USE mydb.my_schema"), - Statement::Use(Use::Object(ObjectName::from(vec![ + Statement::Use(Use::Object(ObjectName(vec![ Ident::new("mydb"), Ident::new("my_schema") ]))) @@ -3092,65 +2687,37 @@ fn parse_use() { for &quote in &quote_styles { // Test single and double identifier with keyword and different type of quotes - assert_eq!( - snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")), - Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote( + std::assert_eq!( + snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)), + Statement::Use(Use::Database(ObjectName(vec![Ident::with_quote( quote, "my_database".to_string(), )]))) ); - assert_eq!( - snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")), - Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote( + std::assert_eq!( + snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)), + Statement::Use(Use::Schema(ObjectName(vec![Ident::with_quote( quote, "my_schema".to_string(), )]))) ); - assert_eq!( - snowflake().verified_stmt(&format!( - "USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}" - )), - Statement::Use(Use::Schema(ObjectName::from(vec![ + std::assert_eq!( + snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)), + Statement::Use(Use::Schema(ObjectName(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) ); - assert_eq!( - snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")), - Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote( - quote, - "my_role".to_string(), - )]))) - ); - assert_eq!( - snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")), - Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote( - quote, - "my_wh".to_string(), - )]))) - ); } // Test invalid syntax - missing identifier let invalid_cases = ["USE SCHEMA", "USE DATABASE", "USE WAREHOUSE"]; for sql in &invalid_cases { - assert_eq!( + std::assert_eq!( snowflake().parse_sql_statements(sql).unwrap_err(), ParserError::ParserError("Expected: identifier, found: EOF".to_string()), ); } - - 
snowflake().verified_stmt("USE SECONDARY ROLES ALL"); - snowflake().verified_stmt("USE SECONDARY ROLES NONE"); - snowflake().verified_stmt("USE SECONDARY ROLES r1, r2, r3"); - - // The following is not documented by Snowflake but still works: - snowflake().one_statement_parses_to("USE SECONDARY ROLE ALL", "USE SECONDARY ROLES ALL"); - snowflake().one_statement_parses_to("USE SECONDARY ROLE NONE", "USE SECONDARY ROLES NONE"); - snowflake().one_statement_parses_to( - "USE SECONDARY ROLE r1, r2, r3", - "USE SECONDARY ROLES r1, r2, r3", - ); } #[test] @@ -3160,7 +2727,7 @@ fn view_comment_option_should_be_after_column_list() { "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t", "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t", ] { - snowflake() + snowflake_and_generic() .verified_stmt(sql); } } @@ -3169,7 +2736,7 @@ fn view_comment_option_should_be_after_column_list() { fn parse_view_column_descriptions() { let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1"; - match snowflake().verified_stmt(sql) { + match snowflake_and_generic().verified_stmt(sql) { Statement::CreateView { name, columns, .. } => { assert_eq!(name.to_string(), "v"); assert_eq!( @@ -3178,9 +2745,7 @@ fn parse_view_column_descriptions() { ViewColumnDef { name: Ident::new("a"), data_type: None, - options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment( - "Comment".to_string() - )])), + options: Some(vec![ColumnOption::Comment("Comment".to_string())]), }, ViewColumnDef { name: Ident::new("b"), @@ -3196,9 +2761,7 @@ fn parse_view_column_descriptions() { #[test] fn test_parentheses_overflow() { - // TODO: increase / improve after we fix the recursion limit - // for real (see https://github.com/apache/datafusion-sqlparser-rs/issues/984) - let max_nesting_level: usize = 25; + let max_nesting_level: usize = 30; // Verify the recursion check is not too wasteful... 
(num of parentheses - 2 is acceptable) let slack = 2; @@ -3218,1223 +2781,3 @@ fn test_parentheses_overflow() { snowflake_with_recursion_limit(max_nesting_level).parse_sql_statements(sql.as_str()); assert_eq!(parsed.err(), Some(ParserError::RecursionLimitExceeded)); } - -#[test] -fn test_show_databases() { - snowflake().verified_stmt("SHOW DATABASES"); - snowflake().verified_stmt("SHOW TERSE DATABASES"); - snowflake().verified_stmt("SHOW DATABASES HISTORY"); - snowflake().verified_stmt("SHOW DATABASES LIKE '%abc%'"); - snowflake().verified_stmt("SHOW DATABASES STARTS WITH 'demo_db'"); - snowflake().verified_stmt("SHOW DATABASES LIMIT 12"); - snowflake() - .verified_stmt("SHOW DATABASES HISTORY LIKE '%aa' STARTS WITH 'demo' LIMIT 20 FROM 'abc'"); - snowflake().verified_stmt("SHOW DATABASES IN ACCOUNT abc"); -} - -#[test] -fn test_parse_show_schemas() { - snowflake().verified_stmt("SHOW SCHEMAS"); - snowflake().verified_stmt("SHOW TERSE SCHEMAS"); - snowflake().verified_stmt("SHOW SCHEMAS IN ACCOUNT"); - snowflake().verified_stmt("SHOW SCHEMAS IN ACCOUNT abc"); - snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE"); - snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE xyz"); - snowflake().verified_stmt("SHOW SCHEMAS HISTORY LIKE '%xa%'"); - snowflake().verified_stmt("SHOW SCHEMAS STARTS WITH 'abc' LIMIT 20"); - snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); -} - -#[test] -fn test_parse_show_objects() { - snowflake().verified_stmt("SHOW OBJECTS"); - snowflake().verified_stmt("SHOW OBJECTS IN abc"); - snowflake().verified_stmt("SHOW OBJECTS LIKE '%test%' IN abc"); - snowflake().verified_stmt("SHOW OBJECTS IN ACCOUNT"); - snowflake().verified_stmt("SHOW OBJECTS IN DATABASE"); - snowflake().verified_stmt("SHOW OBJECTS IN DATABASE abc"); - snowflake().verified_stmt("SHOW OBJECTS IN SCHEMA"); - snowflake().verified_stmt("SHOW OBJECTS IN SCHEMA abc"); - snowflake().verified_stmt("SHOW TERSE OBJECTS"); - snowflake().verified_stmt("SHOW TERSE OBJECTS IN abc"); - snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc"); - snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b'"); - snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b' LIMIT 10"); - snowflake() - .verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b' LIMIT 10 FROM 'x'"); - match snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc") { - Statement::ShowObjects(ShowObjects { - terse, - show_options, - }) => { - assert!(terse); - let name = match show_options.show_in { - Some(ShowStatementIn { - parent_name: Some(val), - .. 
- }) => val.to_string(), - _ => unreachable!(), - }; - assert_eq!("abc", name); - let like = match show_options.filter_position { - Some(ShowStatementFilterPosition::Infix(ShowStatementFilter::Like(val))) => val, - _ => unreachable!(), - }; - assert_eq!("%test%", like); - } - _ => unreachable!(), - } -} - -#[test] -fn test_parse_show_tables() { - snowflake().verified_stmt("SHOW TABLES"); - snowflake().verified_stmt("SHOW TERSE TABLES"); - snowflake().verified_stmt("SHOW TABLES IN ACCOUNT"); - snowflake().verified_stmt("SHOW TABLES IN DATABASE"); - snowflake().verified_stmt("SHOW TABLES IN DATABASE xyz"); - snowflake().verified_stmt("SHOW TABLES IN SCHEMA"); - snowflake().verified_stmt("SHOW TABLES IN SCHEMA xyz"); - snowflake().verified_stmt("SHOW TABLES HISTORY LIKE '%xa%'"); - snowflake().verified_stmt("SHOW TABLES STARTS WITH 'abc' LIMIT 20"); - snowflake().verified_stmt("SHOW TABLES IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES IN ACCOUNT"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES IN DATABASE"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES IN DATABASE xyz"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA xyz"); - snowflake().verified_stmt("SHOW EXTERNAL TABLES STARTS WITH 'abc' LIMIT 20"); - snowflake() - .verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); -} - -#[test] -fn test_show_views() { - snowflake().verified_stmt("SHOW VIEWS"); - snowflake().verified_stmt("SHOW TERSE VIEWS"); - snowflake().verified_stmt("SHOW VIEWS IN ACCOUNT"); - snowflake().verified_stmt("SHOW VIEWS IN DATABASE"); - snowflake().verified_stmt("SHOW VIEWS IN DATABASE xyz"); - snowflake().verified_stmt("SHOW VIEWS IN SCHEMA"); - snowflake().verified_stmt("SHOW VIEWS IN SCHEMA xyz"); - snowflake().verified_stmt("SHOW VIEWS STARTS WITH 'abc' LIMIT 20"); - snowflake().verified_stmt("SHOW VIEWS IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); -} - -#[test] -fn test_parse_show_columns_sql() { - snowflake().verified_stmt("SHOW COLUMNS IN TABLE"); - snowflake().verified_stmt("SHOW COLUMNS IN TABLE abc"); - snowflake().verified_stmt("SHOW COLUMNS LIKE '%xyz%' IN TABLE abc"); -} - -#[test] -fn test_projection_with_nested_trailing_commas() { - let sql = "SELECT a, FROM b, LATERAL FLATTEN(input => events)"; - let _ = snowflake().parse_sql_statements(sql).unwrap(); - - //Single nesting - let sql = "SELECT (SELECT a, FROM b, LATERAL FLATTEN(input => events))"; - let _ = snowflake().parse_sql_statements(sql).unwrap(); - - //Double nesting - let sql = "SELECT (SELECT (SELECT a, FROM b, LATERAL FLATTEN(input => events)))"; - let _ = snowflake().parse_sql_statements(sql).unwrap(); - - let sql = "SELECT a, b, FROM c, (SELECT d, e, FROM f, LATERAL FLATTEN(input => events))"; - let _ = snowflake().parse_sql_statements(sql).unwrap(); -} - -#[test] -fn test_sf_double_dot_notation() { - snowflake().verified_stmt("SELECT * FROM db_name..table_name"); - snowflake().verified_stmt("SELECT * FROM x, y..z JOIN a..b AS b ON x.id = b.id"); - - assert_eq!( - snowflake() - .parse_sql_statements("SELECT * FROM X.Y..") - .unwrap_err() - .to_string(), - "sql parser error: Expected: identifier, found: ." - ); - assert_eq!( - snowflake() - .parse_sql_statements("SELECT * FROM X..Y..Z") - .unwrap_err() - .to_string(), - "sql parser error: Expected: identifier, found: ." 
- ); - assert_eq!( - // Ensure we don't parse leading token - snowflake() - .parse_sql_statements("SELECT * FROM .X.Y") - .unwrap_err() - .to_string(), - "sql parser error: Expected: identifier, found: ." - ); -} - -#[test] -fn test_parse_double_dot_notation_wrong_position() {} - -#[test] -fn parse_insert_overwrite() { - let insert_overwrite_into = r#"INSERT OVERWRITE INTO schema.table SELECT a FROM b"#; - snowflake().verified_stmt(insert_overwrite_into); -} - -#[test] -fn test_table_sample() { - snowflake_and_generic().verified_stmt("SELECT * FROM testtable SAMPLE (10)"); - snowflake_and_generic().verified_stmt("SELECT * FROM testtable TABLESAMPLE (10)"); - snowflake_and_generic() - .verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE BERNOULLI (10)"); - snowflake_and_generic().verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE ROW (10)"); - snowflake_and_generic().verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE ROW (10 ROWS)"); - snowflake_and_generic() - .verified_stmt("SELECT * FROM testtable TABLESAMPLE BLOCK (3) SEED (82)"); - snowflake_and_generic() - .verified_stmt("SELECT * FROM testtable TABLESAMPLE SYSTEM (3) REPEATABLE (82)"); - snowflake_and_generic().verified_stmt("SELECT id FROM mytable TABLESAMPLE (10) REPEATABLE (1)"); - snowflake_and_generic().verified_stmt("SELECT id FROM mytable TABLESAMPLE (10) SEED (1)"); -} - -#[test] -fn parse_ls_and_rm() { - snowflake().one_statement_parses_to("LS @~", "LIST @~"); - snowflake().one_statement_parses_to("RM @~", "REMOVE @~"); - - let statement = snowflake() - .verified_stmt("LIST @SNOWFLAKE_KAFKA_CONNECTOR_externalDataLakeSnowflakeConnector_STAGE_call_tracker_stream/"); - match statement { - Statement::List(command) => { - assert_eq!(command.stage, ObjectName::from(vec!["@SNOWFLAKE_KAFKA_CONNECTOR_externalDataLakeSnowflakeConnector_STAGE_call_tracker_stream/".into()])); - assert!(command.pattern.is_none()); - } - _ => unreachable!(), - }; - - let statement = - snowflake().verified_stmt("REMOVE @my_csv_stage/analysis/ PATTERN='.*data_0.*'"); - match statement { - Statement::Remove(command) => { - assert_eq!( - command.stage, - ObjectName::from(vec!["@my_csv_stage/analysis/".into()]) - ); - assert_eq!(command.pattern, Some(".*data_0.*".to_string())); - } - _ => unreachable!(), - }; - - snowflake().verified_stmt(r#"LIST @"STAGE_WITH_QUOTES""#); - // Semi-colon after stage name - should terminate the stage name - snowflake() - .parse_sql_statements("LIST @db1.schema1.stage1/dir1/;") - .unwrap(); -} - -#[test] -fn test_sql_keywords_as_select_item_ident() { - // Some keywords that should be parsed as an alias - let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"]; - for kw in unreserved_kws { - snowflake().verified_stmt(&format!("SELECT 1, {kw}")); - } - - // Some keywords that should not be parsed as an alias - let reserved_kws = vec![ - "FROM", - "GROUP", - "HAVING", - "INTERSECT", - "INTO", - "ORDER", - "SELECT", - "UNION", - "WHERE", - "WITH", - ]; - for kw in reserved_kws { - assert!(snowflake() - .parse_sql_statements(&format!("SELECT 1, {kw}")) - .is_err()); - } -} - -#[test] -fn test_sql_keywords_as_select_item_aliases() { - // Some keywords that should be parsed as an alias - let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"]; - for kw in unreserved_kws { - snowflake() - .one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}")); - } - - // Some keywords that should not be parsed as an alias - let reserved_kws = vec![ - 
"FROM", - "GROUP", - "HAVING", - "INTERSECT", - "INTO", - "ORDER", - "SELECT", - "UNION", - "WHERE", - "WITH", - ]; - for kw in reserved_kws { - assert!(snowflake() - .parse_sql_statements(&format!("SELECT 1 {kw}")) - .is_err()); - } -} - -#[test] -fn test_sql_keywords_as_table_aliases() { - // Some keywords that should be parsed as an alias implicitly - let unreserved_kws = vec![ - "VIEW", - "EXPLAIN", - "ANALYZE", - "SORT", - "PIVOT", - "UNPIVOT", - "TOP", - "LIMIT", - "OFFSET", - "FETCH", - "EXCEPT", - "CLUSTER", - "DISTRIBUTE", - "GLOBAL", - "ANTI", - "SEMI", - "RETURNING", - "OUTER", - "WINDOW", - "END", - "PARTITION", - "PREWHERE", - "SETTINGS", - "FORMAT", - "MATCH_RECOGNIZE", - "OPEN", - ]; - - for kw in unreserved_kws { - snowflake().verified_stmt(&format!("SELECT * FROM tbl AS {kw}")); - snowflake().one_statement_parses_to( - &format!("SELECT * FROM tbl {kw}"), - &format!("SELECT * FROM tbl AS {kw}"), - ); - } - - // Some keywords that should not be parsed as an alias implicitly - let reserved_kws = vec![ - "FROM", "GROUP", "HAVING", "ORDER", "SELECT", "UNION", "WHERE", "WITH", - ]; - for kw in reserved_kws { - assert!(snowflake() - .parse_sql_statements(&format!("SELECT * FROM tbl {kw}")) - .is_err()); - } -} - -#[test] -fn test_timetravel_at_before() { - snowflake().verified_only_select("SELECT * FROM tbl AT(TIMESTAMP => '2024-12-15 00:00:00')"); - snowflake() - .verified_only_select("SELECT * FROM tbl BEFORE(TIMESTAMP => '2024-12-15 00:00:00')"); -} - -#[test] -fn test_grant_account_global_privileges() { - let privileges = vec![ - "ALL", - "ALL PRIVILEGES", - "ATTACH POLICY", - "AUDIT", - "BIND SERVICE ENDPOINT", - "IMPORT SHARE", - "OVERRIDE SHARE RESTRICTIONS", - "PURCHASE DATA EXCHANGE LISTING", - "RESOLVE ALL", - "READ SESSION", - ]; - let with_grant_options = vec!["", " WITH GRANT OPTION"]; - - for p in &privileges { - for wgo in &with_grant_options { - let sql = format!("GRANT {p} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - - let create_object_types = vec![ - "ACCOUNT", - "APPLICATION", - "APPLICATION PACKAGE", - "COMPUTE POOL", - "DATA EXCHANGE LISTING", - "DATABASE", - "EXTERNAL VOLUME", - "FAILOVER GROUP", - "INTEGRATION", - "NETWORK POLICY", - "ORGANIZATION LISTING", - "REPLICATION GROUP", - "ROLE", - "SHARE", - "USER", - "WAREHOUSE", - ]; - for t in &create_object_types { - for wgo in &with_grant_options { - let sql = format!("GRANT CREATE {t} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - - let apply_types = vec![ - "AGGREGATION POLICY", - "AUTHENTICATION POLICY", - "JOIN POLICY", - "MASKING POLICY", - "PACKAGES POLICY", - "PASSWORD POLICY", - "PROJECTION POLICY", - "ROW ACCESS POLICY", - "SESSION POLICY", - "TAG", - ]; - for t in &apply_types { - for wgo in &with_grant_options { - let sql = format!("GRANT APPLY {t} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - - let execute_types = vec![ - "ALERT", - "DATA METRIC FUNCTION", - "MANAGED ALERT", - "MANAGED TASK", - "TASK", - ]; - for t in &execute_types { - for wgo in &with_grant_options { - let sql = format!("GRANT EXECUTE {t} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - - let manage_types = vec![ - "ACCOUNT SUPPORT CASES", - "EVENT SHARING", - "GRANTS", - "LISTING AUTO FULFILLMENT", - "ORGANIZATION SUPPORT CASES", - "USER SUPPORT CASES", - "WAREHOUSES", - ]; - for t in &manage_types { - for wgo in &with_grant_options { - let sql = 
format!("GRANT MANAGE {t} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - - let monitor_types = vec!["EXECUTION", "SECURITY", "USAGE"]; - for t in &monitor_types { - for wgo in &with_grant_options { - let sql = format!("GRANT MONITOR {t} ON ACCOUNT TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } -} - -#[test] -fn test_grant_account_object_privileges() { - let privileges = vec![ - "ALL", - "ALL PRIVILEGES", - "APPLYBUDGET", - "MODIFY", - "MONITOR", - "USAGE", - "OPERATE", - ]; - - let objects_types = vec![ - "USER", - "RESOURCE MONITOR", - "WAREHOUSE", - "COMPUTE POOL", - "DATABASE", - "INTEGRATION", - "CONNECTION", - "FAILOVER GROUP", - "REPLICATION GROUP", - "EXTERNAL VOLUME", - ]; - - let with_grant_options = vec!["", " WITH GRANT OPTION"]; - - for t in &objects_types { - for p in &privileges { - for wgo in &with_grant_options { - let sql = format!("GRANT {p} ON {t} obj1 TO ROLE role1{wgo}"); - snowflake_and_generic().verified_stmt(&sql); - } - } - } -} - -#[test] -fn test_grant_role_to() { - snowflake_and_generic().verified_stmt("GRANT ROLE r1 TO ROLE r2"); - snowflake_and_generic().verified_stmt("GRANT ROLE r1 TO USER u1"); -} - -#[test] -fn test_grant_database_role_to() { - snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE r1 TO ROLE r2"); - snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE db1.sc1.r1 TO ROLE db1.sc1.r2"); -} - -#[test] -fn test_alter_session() { - assert_eq!( - snowflake() - .parse_sql_statements("ALTER SESSION SET") - .unwrap_err() - .to_string(), - "sql parser error: expected at least one option" - ); - assert_eq!( - snowflake() - .parse_sql_statements("ALTER SESSION UNSET") - .unwrap_err() - .to_string(), - "sql parser error: expected at least one option" - ); - - snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=TRUE"); - snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=FALSE QUERY_TAG='tag'"); - snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT"); - snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT, QUERY_TAG"); - snowflake().one_statement_parses_to( - "ALTER SESSION SET A=false, B='tag';", - "ALTER SESSION SET A=FALSE B='tag'", - ); - snowflake().one_statement_parses_to( - "ALTER SESSION SET A=true \nB='tag'", - "ALTER SESSION SET A=TRUE B='tag'", - ); - snowflake().one_statement_parses_to("ALTER SESSION UNSET a\nB", "ALTER SESSION UNSET a, B"); -} - -#[test] -fn test_alter_session_followed_by_statement() { - let stmts = snowflake() - .parse_sql_statements("ALTER SESSION SET QUERY_TAG='hello'; SELECT 42") - .unwrap(); - match stmts[..] { - [Statement::AlterSession { .. }, Statement::Query { .. 
}] => {} - _ => panic!("Unexpected statements: {stmts:?}"), - } -} - -#[test] -fn test_nested_join_without_parentheses() { - let query = "SELECT DISTINCT p.product_id FROM orders AS o INNER JOIN customers AS c INNER JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; - assert_eq!( - only( - snowflake() - .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o INNER JOIN (customers AS c INNER JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") - .from - ) - .joins, - vec![Join { - relation: TableFactor::NestedJoin { - table_with_joins: Box::new(TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "c".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("products".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "p".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - global: false, - join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("p".to_string()), - Ident::new("customer_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("customer_id".to_string()) - ])), - })), - }] - }), - alias: None - }, - global: false, - join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("order_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("o".to_string()), - Ident::new("order_id".to_string()) - ])), - })) - }], - ); - - let query = "SELECT DISTINCT p.product_id FROM orders AS o JOIN customers AS c JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; - assert_eq!( - only( - snowflake() - .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o JOIN (customers AS c JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") - .from - ) - .joins, - vec![Join { - relation: TableFactor::NestedJoin { - table_with_joins: Box::new(TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "c".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("products".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "p".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - 
with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - global: false, - join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("p".to_string()), - Ident::new("customer_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("customer_id".to_string()) - ])), - })), - }] - }), - alias: None - }, - global: false, - join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("order_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("o".to_string()), - Ident::new("order_id".to_string()) - ])), - })) - }], - ); - - let query = "SELECT DISTINCT p.product_id FROM orders AS o LEFT JOIN customers AS c LEFT JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; - assert_eq!( - only( - snowflake() - .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o LEFT JOIN (customers AS c LEFT JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") - .from - ) - .joins, - vec![Join { - relation: TableFactor::NestedJoin { - table_with_joins: Box::new(TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "c".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("products".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "p".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - global: false, - join_operator: JoinOperator::Left(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("p".to_string()), - Ident::new("customer_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("customer_id".to_string()) - ])), - })), - }] - }), - alias: None - }, - global: false, - join_operator: JoinOperator::Left(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("order_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("o".to_string()), - Ident::new("order_id".to_string()) - ])), - })) - }], - ); - - let query = "SELECT DISTINCT p.product_id FROM orders AS o RIGHT JOIN customers AS c RIGHT JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; - assert_eq!( - only( - snowflake() - .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o RIGHT JOIN (customers AS c RIGHT JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") - .from - ) - .joins, - vec![Join { - relation: TableFactor::NestedJoin { - table_with_joins: 
Box::new(TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "c".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("products".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "p".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - global: false, - join_operator: JoinOperator::Right(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("p".to_string()), - Ident::new("customer_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("customer_id".to_string()) - ])), - })), - }] - }), - alias: None - }, - global: false, - join_operator: JoinOperator::Right(JoinConstraint::On(Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("order_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("o".to_string()), - Ident::new("order_id".to_string()) - ])), - })) - }], - ); - - let query = "SELECT DISTINCT p.product_id FROM orders AS o FULL JOIN customers AS c FULL JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; - assert_eq!( - only( - snowflake() - .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o FULL JOIN (customers AS c FULL JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") - .from - ) - .joins, - vec![Join { - relation: TableFactor::NestedJoin { - table_with_joins: Box::new(TableWithJoins { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("customers".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "c".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName::from(vec![Ident::new("products".to_string())]), - alias: Some(TableAlias { - name: Ident { - value: "p".to_string(), - quote_style: None, - span: Span::empty(), - }, - columns: vec![], - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - json_path: None, - sample: None, - index_hints: vec![], - }, - global: false, - join_operator: JoinOperator::FullOuter(JoinConstraint::On( - Expr::BinaryOp { - left: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("p".to_string()), - Ident::new("customer_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("customer_id".to_string()) - ])), - } - )), - }] - }), - alias: None - }, - global: false, - join_operator: JoinOperator::FullOuter(JoinConstraint::On(Expr::BinaryOp { - left: 
Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("c".to_string()), - Ident::new("order_id".to_string()) - ])), - op: BinaryOperator::Eq, - right: Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("o".to_string()), - Ident::new("order_id".to_string()) - ])), - })) - }], - ); -} - -#[test] -fn parse_connect_by_root_operator() { - let sql = "SELECT CONNECT_BY_ROOT name AS root_name FROM Tbl1"; - - match snowflake().verified_stmt(sql) { - Statement::Query(query) => { - assert_eq!( - query.body.as_select().unwrap().projection[0], - SelectItem::ExprWithAlias { - expr: Expr::Prefixed { - prefix: Ident::new("CONNECT_BY_ROOT"), - value: Box::new(Expr::Identifier(Ident::new("name"))) - }, - alias: Ident::new("root_name"), - } - ); - } - _ => unreachable!(), - } - - let sql = "SELECT CONNECT_BY_ROOT name FROM Tbl2"; - match snowflake().verified_stmt(sql) { - Statement::Query(query) => { - assert_eq!( - query.body.as_select().unwrap().projection[0], - SelectItem::UnnamedExpr(Expr::Prefixed { - prefix: Ident::new("CONNECT_BY_ROOT"), - value: Box::new(Expr::Identifier(Ident::new("name"))) - }) - ); - } - _ => unreachable!(), - } - - let sql = "SELECT CONNECT_BY_ROOT FROM Tbl2"; - let res = snowflake().parse_sql_statements(sql); - assert_eq!( - res.unwrap_err().to_string(), - "sql parser error: Expected an expression, found: FROM" - ); -} - -#[test] -fn test_begin_exception_end() { - for sql in [ - "BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END", - "BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END", - "BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END", - "BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END", - ] { - snowflake().verified_stmt(sql); - } - - let sql = r#" -DECLARE - EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.'); - EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!'); - EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...'); -BEGIN - BEGIN - SELECT 1; - EXCEPTION - WHEN EXCEPTION_1 THEN - SELECT 1; - WHEN EXCEPTION_2 OR EXCEPTION_3 THEN - SELECT 2; - SELECT 3; - WHEN OTHER THEN - SELECT 4; - RAISE; - END; -END -"#; - - // Outer `BEGIN` of the two nested `BEGIN` statements. - let Statement::StartTransaction { mut statements, .. } = snowflake() - .parse_sql_statements(sql) - .unwrap() - .pop() - .unwrap() - else { - unreachable!(); - }; - - // Inner `BEGIN` of the two nested `BEGIN` statements. - let Statement::StartTransaction { - statements, - exception, - has_end_keyword, - .. 
- } = statements.pop().unwrap() - else { - unreachable!(); - }; - - assert_eq!(1, statements.len()); - assert!(has_end_keyword); - - let exception = exception.unwrap(); - assert_eq!(3, exception.len()); - assert_eq!(1, exception[0].idents.len()); - assert_eq!(1, exception[0].statements.len()); - assert_eq!(2, exception[1].idents.len()); - assert_eq!(2, exception[1].statements.len()); -} - -#[test] -fn test_snowflake_fetch_clause_syntax() { - let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY"; - snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical); - - snowflake() - .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical); - snowflake() - .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical); - - snowflake() - .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical); - - snowflake().verified_only_select_with_canonical( - "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS", - canonical, - ); -} - -#[test] -fn test_snowflake_create_view_with_multiple_column_options() { - let create_view_with_tag = - r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#; - snowflake().verified_stmt(create_view_with_tag); -} - -#[test] -fn test_snowflake_create_view_with_composite_tag() { - let create_view_with_tag = - r#"CREATE VIEW X (COL WITH TAG (foo.bar.baz.pii='email')) AS SELECT * FROM Y"#; - snowflake().verified_stmt(create_view_with_tag); -} - -#[test] -fn test_snowflake_create_view_with_composite_policy_name() { - let create_view_with_tag = - r#"CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y"#; - snowflake().verified_stmt(create_view_with_tag); -} - -#[test] -fn test_snowflake_identifier_function() { - // Using IDENTIFIER to reference a column - match &snowflake() - .verified_only_select("SELECT identifier('email') FROM customers") - .projection[0] - { - SelectItem::UnnamedExpr(Expr::Function(Function { name, args, .. })) => { - assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")])); - assert_eq!( - *args, - FunctionArguments::List(FunctionArgumentList { - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("email".to_string()).into() - )))], - clauses: vec![], - duplicate_treatment: None - }) - ); - } - _ => unreachable!(), - } - - // Using IDENTIFIER to reference a case-sensitive column - match &snowflake() - .verified_only_select(r#"SELECT identifier('"Email"') FROM customers"#) - .projection[0] - { - SelectItem::UnnamedExpr(Expr::Function(Function { name, args, .. })) => { - assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")])); - assert_eq!( - *args, - FunctionArguments::List(FunctionArgumentList { - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("\"Email\"".to_string()).into() - )))], - clauses: vec![], - duplicate_treatment: None - }) - ); - } - _ => unreachable!(), - } - - // Using IDENTIFIER to reference an alias of a table - match &snowflake() - .verified_only_select("SELECT identifier('alias1').* FROM tbl AS alias1") - .projection[0] - { - SelectItem::QualifiedWildcard( - SelectItemQualifiedWildcardKind::Expr(Expr::Function(Function { name, args, .. 
})), - _, - ) => { - assert_eq!(*name, ObjectName::from(vec![Ident::new("identifier")])); - assert_eq!( - *args, - FunctionArguments::List(FunctionArgumentList { - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("alias1".to_string()).into() - )))], - clauses: vec![], - duplicate_treatment: None - }) - ); - } - _ => unreachable!(), - } - - // Using IDENTIFIER to reference a database - match snowflake().verified_stmt("CREATE DATABASE IDENTIFIER('tbl')") { - Statement::CreateDatabase { db_name, .. } => { - assert_eq!( - db_name, - ObjectName(vec![ObjectNamePart::Function(ObjectNamePartFunction { - name: Ident::new("IDENTIFIER"), - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("tbl".to_string()).into() - )))] - })]) - ); - } - _ => unreachable!(), - } - - // Using IDENTIFIER to reference a schema - match snowflake().verified_stmt("CREATE SCHEMA IDENTIFIER('db1.sc1')") { - Statement::CreateSchema { schema_name, .. } => { - assert_eq!( - schema_name, - SchemaName::Simple(ObjectName(vec![ObjectNamePart::Function( - ObjectNamePartFunction { - name: Ident::new("IDENTIFIER"), - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("db1.sc1".to_string()).into() - )))] - } - )])) - ); - } - _ => unreachable!(), - } - - // Using IDENTIFIER to reference a table - match snowflake().verified_stmt("CREATE TABLE IDENTIFIER('tbl') (id INT)") { - Statement::CreateTable(CreateTable { name, .. }) => { - assert_eq!( - name, - ObjectName(vec![ObjectNamePart::Function(ObjectNamePartFunction { - name: Ident::new("IDENTIFIER"), - args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("tbl".to_string()).into() - )))] - })]) - ); - } - _ => unreachable!(), - } - - // Cannot have more than one IDENTIFIER part in an object name - assert_eq!( - snowflake() - .parse_sql_statements( - "CREATE TABLE IDENTIFIER('db1').IDENTIFIER('sc1').IDENTIFIER('tbl') (id INT)" - ) - .is_err(), - true - ); - assert_eq!( - snowflake() - .parse_sql_statements("CREATE TABLE IDENTIFIER('db1')..IDENTIFIER('tbl') (id INT)") - .is_err(), - true - ); -} diff --git a/tests/sqlparser_sqlite.rs b/tests/sqlparser_sqlite.rs index 06496f0c..6f8bbb2d 100644 --- a/tests/sqlparser_sqlite.rs +++ b/tests/sqlparser_sqlite.rs @@ -214,6 +214,7 @@ fn parse_create_table_auto_increment() { vec![ColumnDef { name: "bar".into(), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -242,6 +243,7 @@ fn parse_create_table_primary_key_asc_desc() { let expected_column_def = |kind| ColumnDef { name: "bar".into(), data_type: DataType::Int(None), + collation: None, options: vec![ ColumnOptionDef { name: None, @@ -284,11 +286,13 @@ fn parse_create_sqlite_quote() { ColumnDef { name: Ident::with_quote('"', "KEY"), data_type: DataType::Int(None), + collation: None, options: vec![], }, ColumnDef { name: Ident::with_quote('[', "INDEX"), data_type: DataType::Int(None), + collation: None, options: vec![], }, ], @@ -324,7 +328,7 @@ fn parse_create_table_on_conflict_col() { Keyword::IGNORE, Keyword::REPLACE, ] { - let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})"); + let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword); match sqlite_and_generic().verified_stmt(&sql) { Statement::CreateTable(CreateTable { columns, .. 
}) => { assert_eq!( @@ -369,9 +373,7 @@ fn test_placeholder() { let ast = sqlite().verified_only_select(sql); assert_eq!( ast.projection[0], - UnnamedExpr(Expr::Value( - (Value::Placeholder("@xxx".into())).with_empty_span() - )), + UnnamedExpr(Expr::Value(Value::Placeholder("@xxx".into()))), ); } @@ -410,14 +412,13 @@ fn parse_window_function_with_filter() { "count", "user_defined_function", ] { - let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t"); + let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name); let select = sqlite().verified_only_select(&sql); assert_eq!(select.to_string(), sql); assert_eq!( select.projection, vec![SelectItem::UnnamedExpr(Expr::Function(Function { - name: ObjectName::from(vec![Ident::new(func_name)]), - uses_odbc_syntax: false, + name: ObjectName(vec![Ident::new(func_name)]), parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -444,15 +445,11 @@ fn parse_window_function_with_filter() { fn parse_attach_database() { let sql = "ATTACH DATABASE 'test.db' AS test"; let verified_stmt = sqlite().verified_stmt(sql); - assert_eq!(sql, format!("{verified_stmt}")); + assert_eq!(sql, format!("{}", verified_stmt)); match verified_stmt { Statement::AttachDatabase { schema_name, - database_file_name: - Expr::Value(ValueWithSpan { - value: Value::SingleQuotedString(literal_name), - span: _, - }), + database_file_name: Expr::Value(Value::SingleQuotedString(literal_name)), database: true, } => { assert_eq!(schema_name.value, "test"); @@ -468,20 +465,27 @@ fn parse_update_tuple_row_values() { assert_eq!( sqlite().verified_stmt("UPDATE x SET (a, b) = (1, 2)"), Statement::Update { - or: None, assignments: vec![Assignment { target: AssignmentTarget::Tuple(vec![ - ObjectName::from(vec![Ident::new("a"),]), - ObjectName::from(vec![Ident::new("b"),]), + ObjectName(vec![Ident::new("a"),]), + ObjectName(vec![Ident::new("b"),]), ]), value: Expr::Tuple(vec![ - Expr::Value((Value::Number("1".parse().unwrap(), false)).with_empty_span()), - Expr::Value((Value::Number("2".parse().unwrap(), false)).with_empty_span()) + Expr::Value(Value::Number("1".parse().unwrap(), false)), + Expr::Value(Value::Number("2".parse().unwrap(), false)) ]) }], selection: None, table: TableWithJoins { - relation: table_from_name(ObjectName::from(vec![Ident::new("x")])), + relation: TableFactor::Table { + name: ObjectName(vec![Ident::new("x")]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + }, joins: vec![], }, from: None, @@ -521,9 +525,32 @@ fn parse_start_transaction_with_modifier() { sqlite_and_generic().verified_stmt("BEGIN DEFERRED TRANSACTION"); sqlite_and_generic().verified_stmt("BEGIN IMMEDIATE TRANSACTION"); sqlite_and_generic().verified_stmt("BEGIN EXCLUSIVE TRANSACTION"); - sqlite_and_generic().verified_stmt("BEGIN DEFERRED"); - sqlite_and_generic().verified_stmt("BEGIN IMMEDIATE"); - sqlite_and_generic().verified_stmt("BEGIN EXCLUSIVE"); + sqlite_and_generic().one_statement_parses_to("BEGIN DEFERRED", "BEGIN DEFERRED TRANSACTION"); + sqlite_and_generic().one_statement_parses_to("BEGIN IMMEDIATE", "BEGIN IMMEDIATE TRANSACTION"); + sqlite_and_generic().one_statement_parses_to("BEGIN EXCLUSIVE", "BEGIN EXCLUSIVE TRANSACTION"); + + let unsupported_dialects = TestedDialects::new( + all_dialects() + .dialects + .into_iter() + .filter(|x| !(x.is::<SQLiteDialect>() || x.is::<GenericDialect>())) + .collect(), + ); + let res = 
unsupported_dialects.parse_sql_statements("BEGIN DEFERRED"); + assert_eq!( + ParserError::ParserError("Expected: end of statement, found: DEFERRED".to_string()), + res.unwrap_err(), + ); + let res = unsupported_dialects.parse_sql_statements("BEGIN IMMEDIATE"); + assert_eq!( + ParserError::ParserError("Expected: end of statement, found: IMMEDIATE".to_string()), + res.unwrap_err(), + ); + let res = unsupported_dialects.parse_sql_statements("BEGIN EXCLUSIVE"); + assert_eq!( + ParserError::ParserError("Expected: end of statement, found: EXCLUSIVE".to_string()), + res.unwrap_err(), + ); } #[test] @@ -536,60 +563,10 @@ fn test_dollar_identifier_as_placeholder() { Expr::BinaryOp { op, left, right } => { assert_eq!(op, BinaryOperator::Eq); assert_eq!(left, Box::new(Expr::Identifier(Ident::new("id")))); - assert_eq!( - right, - Box::new(Expr::Value( - (Placeholder("$id".to_string())).with_empty_span() - )) - ); + assert_eq!(right, Box::new(Expr::Value(Placeholder("$id".to_string())))); } _ => unreachable!(), } - - // $$ is a valid placeholder in SQLite - match sqlite().verified_expr("id = $$") { - Expr::BinaryOp { op, left, right } => { - assert_eq!(op, BinaryOperator::Eq); - assert_eq!(left, Box::new(Expr::Identifier(Ident::new("id")))); - assert_eq!( - right, - Box::new(Expr::Value( - (Placeholder("$$".to_string())).with_empty_span() - )) - ); - } - _ => unreachable!(), - } -} - -#[test] -fn test_match_operator() { - assert_eq!( - sqlite().verified_expr("col MATCH 'pattern'"), - Expr::BinaryOp { - op: BinaryOperator::Match, - left: Box::new(Expr::Identifier(Ident::new("col"))), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("pattern".to_string())).with_empty_span() - )) - } - ); - sqlite().verified_only_select("SELECT * FROM email WHERE email MATCH 'fts5'"); -} - -#[test] -fn test_regexp_operator() { - assert_eq!( - sqlite().verified_expr("col REGEXP 'pattern'"), - Expr::BinaryOp { - op: BinaryOperator::Regexp, - left: Box::new(Expr::Identifier(Ident::new("col"))), - right: Box::new(Expr::Value( - (Value::SingleQuotedString("pattern".to_string())).with_empty_span() - )) - } - ); - sqlite().verified_only_select(r#"SELECT count(*) FROM messages WHERE msg_text REGEXP '\d+'"#); } fn sqlite() -> TestedDialects {