mirror of
https://github.com/apache/datafusion-sqlparser-rs.git
synced 2025-07-09 18:04:59 +00:00
Compare commits
No commits in common. "main" and "v0.24.0" have entirely different histories.
122 changed files with 10883 additions and 84541 deletions
38
.asf.yaml
38
.asf.yaml
|
@ -1,38 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# This file controls the settings of this repository
|
||||
#
|
||||
# See more details at
|
||||
# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features
|
||||
|
||||
notifications:
|
||||
commits: commits@datafusion.apache.org
|
||||
issues: github@datafusion.apache.org
|
||||
pullrequests: github@datafusion.apache.org
|
||||
github:
|
||||
description: "Extensible SQL Lexer and Parser for Rust"
|
||||
labels:
|
||||
- big-data
|
||||
- rust
|
||||
- sql
|
||||
enabled_merge_buttons:
|
||||
squash: true
|
||||
merge: false
|
||||
rebase: false
|
||||
features:
|
||||
issues: true
|
42
.github/actions/setup-builder/action.yaml
vendored
42
.github/actions/setup-builder/action.yaml
vendored
|
@ -1,42 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
name: Prepare Rust Builder
|
||||
description: 'Prepare Rust Build Environment'
|
||||
inputs:
|
||||
rust-version:
|
||||
description: 'version of rust to install (e.g. stable)'
|
||||
required: true
|
||||
default: 'stable'
|
||||
targets:
|
||||
description: 'The toolchain targets to add, comma-separated'
|
||||
default: ''
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Setup Rust Toolchain
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Installing ${{ inputs.rust-version }}"
|
||||
if [ -n "${{ inputs.targets}}" ]; then
|
||||
rustup toolchain install ${{ inputs.rust-version }} -t ${{ inputs.targets }}
|
||||
else
|
||||
rustup toolchain install ${{ inputs.rust-version }}
|
||||
fi
|
||||
rustup default ${{ inputs.rust-version }}
|
||||
rustup component add rustfmt clippy
|
17
.github/dependabot.yml
vendored
17
.github/dependabot.yml
vendored
|
@ -1,20 +1,3 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: cargo
|
||||
|
|
39
.github/workflows/license.yml
vendored
39
.github/workflows/license.yml
vendored
|
@ -1,39 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
name: license
|
||||
|
||||
# trigger for all PRs and changes to main
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
|
||||
rat:
|
||||
name: Release Audit Tool (RAT)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.8
|
||||
- name: Audit licenses
|
||||
run: ./dev/release/run-rat.sh .
|
107
.github/workflows/rust.yml
vendored
107
.github/workflows/rust.yml
vendored
|
@ -1,79 +1,48 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
name: Rust
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
|
||||
codestyle:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- run: cargo fmt --all -- --check
|
||||
- name: Set up Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
with:
|
||||
components: rustfmt
|
||||
# Note that `nightly` is required for `license_template_path`, as
|
||||
# it's an unstable feature.
|
||||
rust-version: nightly
|
||||
- uses: actions/checkout@v2
|
||||
- run: cargo +nightly fmt -- --check --config-path <(echo 'license_template_path = "HEADER"')
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- name: Set up Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
with:
|
||||
components: clippy
|
||||
- uses: actions/checkout@v2
|
||||
- run: cargo clippy --all-targets --all-features -- -D warnings
|
||||
|
||||
benchmark-lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- run: cd sqlparser_bench && cargo clippy --all-targets --all-features -- -D warnings
|
||||
|
||||
compile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- name: Set up Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
- uses: actions/checkout@master
|
||||
- run: cargo check --all-targets --all-features
|
||||
|
||||
docs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RUSTDOCFLAGS: "-Dwarnings"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- run: cargo doc --document-private-items --no-deps --workspace --all-features
|
||||
|
||||
compile-no-std:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- name: Set up Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
with:
|
||||
targets: 'thumbv6m-none-eabi'
|
||||
- uses: actions/checkout@master
|
||||
- run: cargo check --no-default-features --target thumbv6m-none-eabi
|
||||
|
||||
test:
|
||||
|
@ -82,14 +51,38 @@ jobs:
|
|||
rust: [stable, beta, nightly]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Rust Toolchain
|
||||
uses: ./.github/actions/setup-builder
|
||||
- name: Setup Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
with:
|
||||
rust-version: ${{ matrix.rust }}
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- name: Install Tarpaulin
|
||||
run: cargo install cargo-tarpaulin
|
||||
uses: actions-rs/install@v0.1
|
||||
with:
|
||||
crate: cargo-tarpaulin
|
||||
version: 0.14.2
|
||||
use-tool-cache: true
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Test
|
||||
run: cargo test --all-features
|
||||
- name: Coverage
|
||||
if: matrix.rust == 'stable'
|
||||
run: cargo tarpaulin -o Lcov --output-dir ./coverage
|
||||
- name: Coveralls
|
||||
if: matrix.rust == 'stable'
|
||||
uses: coverallsapp/github-action@master
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
publish-crate:
|
||||
if: startsWith(github.ref, 'refs/tags/v0')
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test]
|
||||
steps:
|
||||
- name: Set up Rust
|
||||
uses: hecrj/setup-rust-action@v1
|
||||
- uses: actions/checkout@v2
|
||||
- name: Publish
|
||||
shell: bash
|
||||
run: |
|
||||
cargo publish --token ${{ secrets.CRATES_TOKEN }}
|
||||
|
|
38
.github/workflows/stale.yml
vendored
38
.github/workflows/stale.yml
vendored
|
@ -1,38 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
name: "Close stale PRs"
|
||||
on:
|
||||
schedule:
|
||||
- cron: "30 1 * * *"
|
||||
|
||||
jobs:
|
||||
close-stale-prs:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/stale@v9
|
||||
with:
|
||||
stale-pr-message: "Thank you for your contribution. Unfortunately, this pull request is stale because it has been open 60 days with no activity. Please remove the stale label or comment or this will be closed in 7 days."
|
||||
days-before-pr-stale: 60
|
||||
days-before-pr-close: 7
|
||||
# do not close stale issues
|
||||
days-before-issue-stale: -1
|
||||
days-before-issue-close: -1
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
6
.gitignore
vendored
6
.gitignore
vendored
|
@ -2,8 +2,6 @@
|
|||
# will have compiled files and executables
|
||||
/target/
|
||||
/sqlparser_bench/target/
|
||||
/derive/target/
|
||||
dev/dist
|
||||
|
||||
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
|
||||
# More information here http://doc.crates.io/guide.html#cargotoml-vs-cargolock
|
||||
|
@ -15,7 +13,3 @@ Cargo.lock
|
|||
# IDEs
|
||||
.idea
|
||||
.vscode
|
||||
|
||||
*.swp
|
||||
|
||||
.DS_store
|
|
@ -1 +0,0 @@
|
|||
rust 1.75.0
|
520
CHANGELOG.md
520
CHANGELOG.md
|
@ -1,36 +1,494 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Changelog
|
||||
All notable changes to this project will be documented in one of the linked
|
||||
files.
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project aims to adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
Given that the parser produces a typed AST, any changes to the AST will
|
||||
technically be breaking and thus will result in a `0.(N+1)` version.
|
||||
Given that the parser produces a typed AST, any changes to the AST will technically be breaking and thus will result in a `0.(N+1)` version. We document changes that break via addition as "Added".
|
||||
|
||||
## [Unreleased]
|
||||
Check https://github.com/sqlparser-rs/sqlparser-rs/commits/main for undocumented changes.
|
||||
|
||||
## [0.24.0] 2022-09-29
|
||||
|
||||
### Added
|
||||
|
||||
* Support `MILLENNIUM` (2 Ns) (#633) - Thanks @sarahyurick
|
||||
* Support `MEDIUMINT` (#630) - Thanks @AugustoFKL
|
||||
* Support `DOUBLE PRECISION` (#629) - Thanks @AugustoFKL
|
||||
* Support precision in `CLOB`, `BINARY`, `VARBINARY`, `BLOB` data type (#618) - Thanks @ding-young
|
||||
* Support `CREATE ROLE` and `DROP ROLE` (#598) - Thanks @blx
|
||||
* Support full range of sqlite prepared statement placeholders (#604) - Thanks @lovasoa
|
||||
* Support National string literal with lower case `n` (#612) - Thanks @mskrzypkows
|
||||
* Support SHOW FUNCTIONS (#620) - Thanks @joocer
|
||||
* Support `set time zone to 'some-timezone'` (#617) - Thanks @waitingkuo
|
||||
|
||||
### Changed
|
||||
* Move `Value::Interval` to `Expr::Interval` (#609) - Thanks @ding-young
|
||||
* Update `criterion` dev-requirement from 0.3 to 0.4 in /sqlparser_bench (#611) - Thanks @dependabot
|
||||
* Box `Query` in `Cte` (#572) - Thanks @MazterQyou
|
||||
|
||||
### Other
|
||||
* Disambiguate CREATE ROLE ... USER and GROUP (#628) - Thanks @alamb
|
||||
* Add test for optional WITH in CREATE ROLE (#627) - Thanks @alamb
|
||||
|
||||
## [0.23.0] 2022-09-08
|
||||
|
||||
### Added
|
||||
* Add support for aggregate expressions with filters (#585) - Thanks @andygrove
|
||||
* Support `LOCALTIME` and `LOCALTIMESTAMP` time functions (#592) - Thanks @MazterQyou
|
||||
|
||||
## [0.22.0] 2022-08-26
|
||||
|
||||
### Added
|
||||
* Support `OVERLAY` expressions (#594) - Thanks @ayushg
|
||||
* Support `WITH TIMEZONE` and `WITHOUT TIMEZONE` when parsing `TIMESTAMP` expressions (#589) - Thanks @waitingkuo
|
||||
* Add ability for dialects to override prefix, infix, and statement parsing (#581) - Thanks @andygrove
|
||||
|
||||
## [0.21.0] 2022-08-18
|
||||
|
||||
### Added
|
||||
* Support `IS [NOT] TRUE`, `IS [NOT] FALSE`, and `IS [NOT] UNKNOWN` - Thanks (#583) @sarahyurick
|
||||
* Support `SIMILAR TO` syntax (#569) - Thanks @ayushdg
|
||||
* Support `SHOW COLLATION` (#564) - Thanks @MazterQyou
|
||||
* Support `SHOW TABLES` (#563) - Thanks @MazterQyou
|
||||
* Support `SET NAMES literal [COLLATE literal]` (#558) - Thanks @ovr
|
||||
* Support trailing commas (#557) in `BigQuery` dialect - Thanks @komukomo
|
||||
* Support `USE <DB>` (#565) - Thanks @MazterQyou
|
||||
* Support `SHOW COLUMNS FROM tbl FROM db` (#562) - Thanks @MazterQyou
|
||||
* Support `SHOW VARIABLES` for `MySQL` dialect (#559) - Thanks @ovr and @vasilev-alex
|
||||
|
||||
### Changed
|
||||
* Support arbitrary expression in `SET` statement (#574) - Thanks @ovr and @vasilev-alex
|
||||
* Parse LIKE patterns as Expr not Value (#579) - Thanks @andygrove
|
||||
* Update Ballista link in README (#576) - Thanks @sanxiyn
|
||||
* Parse `TRIM` from with optional expr and `FROM` expr (#573) - Thanks @ayushdg
|
||||
* Support PostgreSQL array subquery constructor (#566) - Thanks @MazterQyou
|
||||
* Clarify contribution licensing (#570) - Thanks @alamb
|
||||
* Update for new clippy ints (#571) - Thanks @alamb
|
||||
* Change `Like` and `ILike` to `Expr` variants, allow escape char (#569) - Thanks @ayushdg
|
||||
* Parse special keywords as functions (`current_user`, `user`, etc) (#561) - Thanks @ovr
|
||||
* Support expressions in `LIMIT`/`OFFSET` (#567) - Thanks @MazterQyou
|
||||
|
||||
## [0.20.0] 2022-08-05
|
||||
|
||||
### Added
|
||||
* Support custom `OPERATOR` postgres syntax (#548) - Thanks @iskakaushik
|
||||
* Support `SAFE_CAST` for BigQuery (#552) - Thanks @togami2864
|
||||
|
||||
### Changed
|
||||
* Added SECURITY.md (#546) - Thanks @JamieSlome
|
||||
* Allow `>>` and `<<` binary operators in Generic dialect (#553) - Thanks @ovr
|
||||
* Allow `NestedJoin` with an alias (#551) - Thanks @waitingkuo
|
||||
|
||||
## [0.19.0] 2022-07-28
|
||||
|
||||
### Added
|
||||
|
||||
* Support `ON CLUSTER` for `CREATE TABLE` statement (ClickHouse DDL) (#527) - Thanks @andyrichardson
|
||||
* Support empty `ARRAY` literals (#532) - Thanks @bitemyapp
|
||||
* Support `AT TIME ZONE` clause (#539) - Thanks @bitemyapp
|
||||
* Support `USING` clause and table aliases in `DELETE` (#541) - Thanks @mobuchowski
|
||||
* Support `SHOW CREATE VIEW` statement (#536) - Thanks @mrob95
|
||||
* Support `CLONE` clause in `CREATE TABLE` statements (#542) - Thanks @mobuchowski
|
||||
* Support `WITH OFFSET Alias` in table references (#528) - Thanks @sivchari
|
||||
* Support double quoted (`"`) literal strings: (#530) - Thanks @komukomo
|
||||
* Support `ON UPDATE` clause on column definitions in `CREATE TABLE` statements (#522) - Thanks @frolovdev
|
||||
|
||||
|
||||
- Unreleased: Check https://github.com/sqlparser-rs/sqlparser-rs/commits/main for undocumented changes.
|
||||
- `0.56.0`: [changelog/0.56.0.md](changelog/0.56.0.md)
|
||||
- `0.55.0`: [changelog/0.55.0.md](changelog/0.55.0.md)
|
||||
- `0.54.0`: [changelog/0.54.0.md](changelog/0.54.0.md)
|
||||
- `0.53.0`: [changelog/0.53.0.md](changelog/0.53.0.md)
|
||||
- `0.52.0`: [changelog/0.52.0.md](changelog/0.52.0.md)
|
||||
- `0.51.0` and earlier: [changelog/0.51.0-pre.md](changelog/0.51.0-pre.md)
|
||||
### Changed:
|
||||
|
||||
* `Box`ed `Query` body to save stack space (#540) - Thanks @5tan
|
||||
* Distinguish between `INT` and `INTEGER` types (#525) - Thanks @frolovdev
|
||||
* Parse `WHERE NOT EXISTS` as `Expr::Exists` rather than `Expr::UnaryOp` for consistency (#523) - Thanks @frolovdev
|
||||
* Support `Expr` instead of `String` for argument to `INTERVAL` (#517) - Thanks @togami2864
|
||||
|
||||
### Fixed:
|
||||
|
||||
* Report characters instead of bytes in error messages (#529) - Thanks @michael-2956
|
||||
|
||||
|
||||
## [0.18.0] 2022-06-06
|
||||
|
||||
### Added
|
||||
|
||||
* Support `CLOSE` (cursors) (#515) - Thanks @ovr
|
||||
* Support `DECLARE` (cursors) (#509) - Thanks @ovr
|
||||
* Support `FETCH` (cursors) (#510) - Thanks @ovr
|
||||
* Support `DATETIME` keyword (#512) - Thanks @komukomo
|
||||
* Support `UNNEST` as a table factor (#493) - Thanks @sivchari
|
||||
* Support `CREATE FUNCTION` (hive flavor) (#496) - Thanks @mobuchowski
|
||||
* Support placeholders (`$` or `?`) in `LIMIT` clause (#494) - Thanks @step-baby
|
||||
* Support escaped string literals (PostgreSQL) (#502) - Thanks @ovr
|
||||
* Support `IS TRUE` and `IS FALSE` (#499) - Thanks @ovr
|
||||
* Support `DISCARD [ALL | PLANS | SEQUENCES | TEMPORARY | TEMP]` (#500) - Thanks @gandronchik
|
||||
* Support `array<..>` HIVE data types (#491) - Thanks @mobuchowski
|
||||
* Support `SET` values that begin with `-` #495 - Thanks @mobuchowski
|
||||
* Support unicode whitespace (#482) - Thanks @alexsatori
|
||||
* Support `BigQuery` dialect (#490) - Thanks @komukomo
|
||||
|
||||
### Changed:
|
||||
* Add docs for MapAccess (#489) - Thanks @alamb
|
||||
* Rename `ArrayIndex::indexs` to `ArrayIndex::indexes` (#492) - Thanks @alamb
|
||||
|
||||
### Fixed:
|
||||
* Fix escaping of trailing quote in quoted identifiers (#505) - Thanks @razzolini-qpq
|
||||
* Fix parsing of `COLLATE` after parentheses in expressions (#507) - Thanks @razzolini-qpq
|
||||
* Distinguish tables and nullary functions in `FROM` (#506) - Thanks @razzolini-qpq
|
||||
* Fix `MERGE INTO` semicolon handling (#508) - Thanks @mskrzypkows
|
||||
|
||||
## [0.17.0] 2022-05-09
|
||||
|
||||
### Added
|
||||
|
||||
* Support `#` as first character in field name for `RedShift` dialect (#485) - Thanks @yuval-illumex
|
||||
* Support for postgres composite types (#466) - Thanks @poonai
|
||||
* Support `TABLE` keyword with SELECT INTO (#487) - Thanks @MazterQyou
|
||||
* Support `ANY`/`ALL` operators (#477) - Thanks @ovr
|
||||
* Support `ArrayIndex` in `GenericDialect` (#480) - Thanks @ovr
|
||||
* Support `Redshift` dialect, handle square brackets properly (#471) - Thanks @mskrzypkows
|
||||
* Support `KILL` statement (#479) - Thanks @ovr
|
||||
* Support `QUALIFY` clause on `SELECT` for `Snowflake` dialect (#465) - Thanks @mobuchowski
|
||||
* Support `POSITION(x IN y)` function syntax (#463) @yuval-illumex
|
||||
* Support `global`,`local`, `on commit` for `create temporary table` (#456) - Thanks @gandronchik
|
||||
* Support `NVARCHAR` data type (#462) - Thanks @yuval-illumex
|
||||
* Support for postgres json operators `->`, `->>`, `#>`, and `#>>` (#458) - Thanks @poonai
|
||||
* Support `SET ROLE` statement (#455) - Thanks @slhmy
|
||||
|
||||
### Changed:
|
||||
* Improve docstrings for `KILL` statement (#481) - Thanks @alamb
|
||||
* Add negative tests for `POSITION` (#469) - Thanks @alamb
|
||||
* Add negative tests for `IN` parsing (#468) - Thanks @alamb
|
||||
* Suppport table names (as well as subqueries) as source in `MERGE` statements (#483) - Thanks @mskrzypkows
|
||||
|
||||
|
||||
### Fixed:
|
||||
* `INTO` keyword is optional for `INSERT`, `MERGE` (#473) - Thanks @mobuchowski
|
||||
* Support `IS TRUE` and `IS FALSE` expressions in boolean filter (#474) - Thanks @yuval-illumex
|
||||
* Support fully qualified object names in `SET VARIABLE` (#484) - Thanks mobuchowski
|
||||
|
||||
## [0.16.0] 2022-04-03
|
||||
|
||||
### Added
|
||||
|
||||
* Support `WEEK` keyword in `EXTRACT` (#436) - Thanks @Ted-Jiang
|
||||
* Support `MERGE` statement (#430) - Thanks @mobuchowski
|
||||
* Support `SAVEPOINT` statement (#438) - Thanks @poonai
|
||||
* Support `TO` clause in `COPY` (#441) - Thanks @matthewmturner
|
||||
* Support `CREATE DATABASE` statement (#451) - Thanks @matthewmturner
|
||||
* Support `FROM` clause in `UPDATE` statement (#450) - Thanks @slhmy
|
||||
* Support additional `COPY` options (#446) - Thanks @wangrunji0408
|
||||
|
||||
### Fixed:
|
||||
* Bug in array / map access parsing (#433) - Thanks @monadbobo
|
||||
|
||||
## [0.15.0] 2022-03-07
|
||||
|
||||
### Added
|
||||
|
||||
* Support for ClickHouse array types (e.g. [1,2,3]) (#429) - Thanks @monadbobo
|
||||
* Support for `unsigned tinyint`, `unsigned int`, `unsigned smallint` and `unsigned bigint` datatypes (#428) - Thanks @watarukura
|
||||
* Support additional keywords for `EXTRACT` (#427) - Thanks @mobuchowski
|
||||
* Support IN UNNEST(expression) (#426) - Thanks @komukomo
|
||||
* Support COLLATION keywork on CREATE TABLE (#424) - Thanks @watarukura
|
||||
* Support FOR UPDATE/FOR SHARE clause (#418) - Thanks @gamife
|
||||
* Support prepared statement placeholder arg `?` and `$` (#420) - Thanks @gamife
|
||||
* Support array expressions such as `ARRAY[1,2]` , `foo[1]` and `INT[][]` (#419) - Thanks @gamife
|
||||
|
||||
### Changed:
|
||||
* remove Travis CI (#421) - Thanks @efx
|
||||
|
||||
### Fixed:
|
||||
* Allow `array` to be used as a function name again (#432) - @alamb
|
||||
* Update docstring reference to `Query` (#423) - Thanks @max-sixty
|
||||
|
||||
## [0.14.0] 2022-02-09
|
||||
|
||||
### Added
|
||||
* Support `CURRENT_TIMESTAMP`, `CURRENT_TIME`, and `CURRENT_DATE` (#391) - Thanks @yuval-illumex
|
||||
* SUPPORT `SUPER` keyword (#387) - Thanks @flaneur2020
|
||||
* Support differing orders of `OFFSET` `LIMIT` as well as `LIMIT` `OFFSET` (#413) - Thanks @yuval-illumex
|
||||
* Support for `FROM <filename>`, `DELIMITER`, and `CSV HEADER` options for `COPY` command (#409) - Thanks @poonai
|
||||
* Support `CHARSET` and `ENGINE` clauses on `CREATE TABLE` for mysql (#392) - Thanks @antialize
|
||||
* Support `DROP CONSTRAINT [ IF EXISTS ] <name> [ CASCADE ]` (#396) - Thanks @tvallotton
|
||||
* Support parsing tuples and add `Expr::Tuple` (#414) - @alamb
|
||||
* Support MySQL style `LIMIT X, Y` (#415) - @alamb
|
||||
* Support `SESSION TRANSACTION` and `TRANSACTION SNAPSHOT`. (#379) - Thanks @poonai
|
||||
* Support `ALTER COLUMN` and `RENAME CONSTRAINT` (#381) - Thanks @zhamlin
|
||||
* Support for Map access, add ClickHouse dialect (#382) - Thanks @monadbobo
|
||||
|
||||
### Changed
|
||||
* Restrict where wildcard (`*`) can appear, add to `FunctionArgExpr` remove `Expr::[Qualified]Wildcard`, (#378) - Thanks @panarch
|
||||
* Update simple_logger requirement from 1.9 to 2.1 (#403)
|
||||
* export all methods of parser (#397) - Thanks @neverchanje!
|
||||
* Clarify maintenance status on README (#416) - @alamb
|
||||
|
||||
### Fixed
|
||||
* Fix new clippy errors (#412) - @alamb
|
||||
* Fix panic with `GRANT/REVOKE` in `CONNECT`, `CREATE`, `EXECUTE` or `TEMPORARY` - Thanks @evgenyx00
|
||||
* Handle double quotes inside quoted identifiers correctly (#411) - Thanks @Marwes
|
||||
* Handle mysql backslash escaping (#373) - Thanks @vasilev-alex
|
||||
|
||||
## [0.13.0] 2021-12-10
|
||||
|
||||
### Added
|
||||
* Add ALTER TABLE CHANGE COLUMN, extend the UPDATE statement with ON clause (#375) - Thanks @0xA537FD!
|
||||
* Add support for GROUPIING SETS, ROLLUP and CUBE - Thanks @Jimexist!
|
||||
* Add basic support for GRANT and REVOKE (#365) - Thanks @blx!
|
||||
|
||||
### Changed
|
||||
* Use Rust 2021 edition (#368) - Thanks @Jimexist!
|
||||
|
||||
### Fixed
|
||||
* Fix clippy errors (#367, #374) - Thanks @Jimexist!
|
||||
|
||||
|
||||
## [0.12.0] 2021-10-14
|
||||
|
||||
### Added
|
||||
* Add support for [NOT] IS DISTINCT FROM (#306) - @Dandandan
|
||||
|
||||
### Changed
|
||||
* Move the keywords module - Thanks @koushiro!
|
||||
|
||||
|
||||
## [0.11.0] 2021-09-24
|
||||
|
||||
### Added
|
||||
* Support minimum display width for integer data types (#337) Thanks @vasilev-alex!
|
||||
* Add logical XOR operator (#357) - Thanks @xzmrdltl!
|
||||
* Support DESCRIBE table_name (#340) - Thanks @ovr!
|
||||
* Support SHOW CREATE TABLE|EVENT|FUNCTION (#338) - Thanks @ovr!
|
||||
* Add referential actions to TableConstraint foreign key (#306) - Thanks @joshwd36!
|
||||
|
||||
### Changed
|
||||
* Enable map access for numbers, multiple nesting levels (#356) - Thanks @Igosuki!
|
||||
* Rename Token::Mult to Token::Mul (#353) - Thanks @koushiro!
|
||||
* Use derive(Default) for HiveFormat (#348) - Thanks @koushiro!
|
||||
* Improve tokenizer error (#347) - Thanks @koushiro!
|
||||
* Eliminate redundant string copy in Tokenizer (#343) - Thanks @koushiro!
|
||||
* Update bigdecimal requirement from 0.2 to 0.3 dependencies (#341)
|
||||
* Support parsing hexadecimal literals that start with `0x` (#324) - Thanks @TheSchemm!
|
||||
|
||||
|
||||
## [0.10.0] 2021-08-23
|
||||
|
||||
### Added
|
||||
* Support for `no_std` (#332) - Thanks @koushiro!
|
||||
* Postgres regular expression operators (`~`, `~*`, `!~`, `!~*`) (#328) - Thanks @b41sh!
|
||||
* tinyint (#320) - Thanks @sundy-li
|
||||
* ILIKE (#300) - Thanks @maxcountryman!
|
||||
* TRIM syntax (#331, #334) - Thanks ever0de
|
||||
|
||||
|
||||
### Fixed
|
||||
* Return error instead of panic (#316) - Thanks @BohuTANG!
|
||||
|
||||
### Changed
|
||||
- Rename `Modulus` to `Modulo` (#335) - Thanks @RGRAVITY817!
|
||||
- Update links to reflect repository move to `sqlparser-rs` GitHub org (#333) - Thanks @andygrove
|
||||
- Add default value for `WindowFrame` (#313) - Thanks @Jimexist!
|
||||
|
||||
## [0.9.0] 2021-03-21
|
||||
|
||||
### Added
|
||||
* Add support for `TRY_CAST` syntax (#299) - Thanks @seddonm1!
|
||||
|
||||
## [0.8.0] 2021-02-20
|
||||
|
||||
### Added
|
||||
* Introduce Hive QL dialect `HiveDialect` and syntax (#235) - Thanks @hntd187!
|
||||
* Add `SUBSTRING(col [FROM <expr>] [FOR <expr>])` syntax (#293)
|
||||
* Support parsing floats without leading digits `.01` (#294)
|
||||
* Support parsing multiple show variables (#290) - Thanks @francis-du!
|
||||
* Support SQLite `INSERT OR [..]` syntax (#281) - Thanks @zhangli-pear!
|
||||
|
||||
## [0.7.0] 2020-12-28
|
||||
|
||||
### Changed
|
||||
- Change the MySQL dialect to support `` `identifiers` `` quoted with backticks instead of the standard `"double-quoted"` identifiers (#247) - thanks @mashuai!
|
||||
- Update bigdecimal requirement from 0.1 to 0.2 (#268)
|
||||
|
||||
### Added
|
||||
- Enable dialect-specific behaviours in the parser (`dialect_of!()`) (#254) - thanks @eyalleshem!
|
||||
- Support named arguments in function invocations (`ARG_NAME => val`) (#250) - thanks @eyalleshem!
|
||||
- Support `TABLE()` functions in `FROM` (#253) - thanks @eyalleshem!
|
||||
- Support Snowflake's single-line comments starting with '#' or '//' (#264) - thanks @eyalleshem!
|
||||
- Support PostgreSQL `PREPARE`, `EXECUTE`, and `DEALLOCATE` (#243) - thanks @silathdiir!
|
||||
- Support PostgreSQL math operators (#267) - thanks @alex-dukhno!
|
||||
- Add SQLite dialect (#248) - thanks @mashuai!
|
||||
- Add Snowflake dialect (#259) - thanks @eyalleshem!
|
||||
- Support for Recursive CTEs - thanks @rhanqtl!
|
||||
- Support `FROM (table_name) alias` syntax - thanks @eyalleshem!
|
||||
- Support for `EXPLAIN [ANALYZE] VERBOSE` - thanks @ovr!
|
||||
- Support `ANALYZE TABLE`
|
||||
- DDL:
|
||||
- Support `OR REPLACE` in `CREATE VIEW`/`TABLE` (#239) - thanks @Dandandan!
|
||||
- Support specifying `ASC`/`DESC` in index columns (#249) - thanks @mashuai!
|
||||
- Support SQLite `AUTOINCREMENT` and MySQL `AUTO_INCREMENT` column option in `CREATE TABLE` (#234) - thanks @mashuai!
|
||||
- Support PostgreSQL `IF NOT EXISTS` for `CREATE SCHEMA` (#276) - thanks @alex-dukhno!
|
||||
|
||||
### Fixed
|
||||
- Fix a typo in `JSONFILE` serialization, introduced in 0.3.1 (#237)
|
||||
- Change `CREATE INDEX` serialization to not end with a semicolon, introduced in 0.5.1 (#245)
|
||||
- Don't fail parsing `ALTER TABLE ADD COLUMN` ending with a semicolon, introduced in 0.5.1 (#246) - thanks @mashuai
|
||||
|
||||
## [0.6.1] - 2020-07-20
|
||||
|
||||
### Added
|
||||
- Support BigQuery `ASSERT` statement (#226)
|
||||
|
||||
## [0.6.0] - 2020-07-20
|
||||
|
||||
### Added
|
||||
- Support SQLite's `CREATE TABLE (...) WITHOUT ROWID` (#208) - thanks @mashuai!
|
||||
- Support SQLite's `CREATE VIRTUAL TABLE` (#209) - thanks @mashuai!
|
||||
|
||||
## [0.5.1] - 2020-06-26
|
||||
This release should have been called `0.6`, as it introduces multiple incompatible changes to the API. If you don't want to upgrade yet, you can revert to the previous version by changing your `Cargo.toml` to:
|
||||
|
||||
sqlparser = "= 0.5.0"
|
||||
|
||||
|
||||
### Changed
|
||||
- **`Parser::parse_sql` now accepts a `&str` instead of `String` (#182)** - thanks @Dandandan!
|
||||
- Change `Ident` (previously a simple `String`) to store the parsed (unquoted) `value` of the identifier and the `quote_style` separately (#143) - thanks @apparebit!
|
||||
- Support Snowflake's `FROM (table_name)` (#155) - thanks @eyalleshem!
|
||||
- Add line and column number to TokenizerError (#194) - thanks @Dandandan!
|
||||
- Use Token::EOF instead of Option<Token> (#195)
|
||||
- Make the units keyword following `INTERVAL '...'` optional (#184) - thanks @maxcountryman!
|
||||
- Generalize `DATE`/`TIME`/`TIMESTAMP` literals representation in the AST (`TypedString { data_type, value }`) and allow `DATE` and other keywords to be used as identifiers when not followed by a string (#187) - thanks @maxcountryman!
|
||||
- Output DataType capitalized (`fmt::Display`) (#202) - thanks @Dandandan!
|
||||
|
||||
### Added
|
||||
- Support MSSQL `TOP (<N>) [ PERCENT ] [ WITH TIES ]` (#150) - thanks @alexkyllo!
|
||||
- Support MySQL `LIMIT row_count OFFSET offset` (not followed by `ROW` or `ROWS`) and remember which variant was parsed (#158) - thanks @mjibson!
|
||||
- Support PostgreSQL `CREATE TABLE IF NOT EXISTS table_name` (#163) - thanks @alex-dukhno!
|
||||
- Support basic forms of `CREATE INDEX` and `DROP INDEX` (#167) - thanks @mashuai!
|
||||
- Support `ON { UPDATE | DELETE } { RESTRICT | CASCADE | SET NULL | NO ACTION | SET DEFAULT }` in `FOREIGN KEY` constraints (#170) - thanks @c7hm4r!
|
||||
- Support basic forms of `CREATE SCHEMA` and `DROP SCHEMA` (#173) - thanks @alex-dukhno!
|
||||
- Support `NULLS FIRST`/`LAST` in `ORDER BY` expressions (#176) - thanks @houqp!
|
||||
- Support `LISTAGG()` (#174) - thanks @maxcountryman!
|
||||
- Support the string concatentation operator `||` (#178) - thanks @Dandandan!
|
||||
- Support bitwise AND (`&`), OR (`|`), XOR (`^`) (#181) - thanks @Dandandan!
|
||||
- Add serde support to AST structs and enums (#196) - thanks @panarch!
|
||||
- Support `ALTER TABLE ADD COLUMN`, `RENAME COLUMN`, and `RENAME TO` (#203) - thanks @mashuai!
|
||||
- Support `ALTER TABLE DROP COLUMN` (#148) - thanks @ivanceras!
|
||||
- Support `CREATE TABLE ... AS ...` (#206) - thanks @Dandandan!
|
||||
|
||||
### Fixed
|
||||
- Report an error for unterminated string literals (#165)
|
||||
- Make file format (`STORED AS`) case insensitive (#200) and don't allow quoting it (#201) - thanks @Dandandan!
|
||||
|
||||
## [0.5.0] - 2019-10-10
|
||||
|
||||
### Changed
|
||||
- Replace the `Value::Long(u64)` and `Value::Double(f64)` variants with `Value::Number(String)` to avoid losing precision when parsing decimal literals (#130) - thanks @benesch!
|
||||
- `--features bigdecimal` can be enabled to work with `Value::Number(BigDecimal)` instead, at the cost of an additional dependency.
|
||||
|
||||
### Added
|
||||
- Support MySQL `SHOW COLUMNS`, `SET <variable>=<value>`, and `SHOW <variable>` statements (#135) - thanks @quodlibetor and @benesch!
|
||||
|
||||
### Fixed
|
||||
- Don't fail to parse `START TRANSACTION` followed by a semicolon (#139) - thanks @gaffneyk!
|
||||
|
||||
|
||||
## [0.4.0] - 2019-07-02
|
||||
This release brings us closer to SQL-92 support, mainly thanks to the improvements contributed back from @MaterializeInc's fork and other work by @benesch.
|
||||
|
||||
### Changed
|
||||
- Remove "SQL" from type and enum variant names, `SQLType` -> `DataType`, remove "sql" prefix from module names (#105, #122)
|
||||
- Rename `ASTNode` -> `Expr` (#119)
|
||||
- Improve consistency of binary/unary op nodes (#112):
|
||||
- `ASTNode::SQLBinaryExpr` is now `Expr::BinaryOp` and `ASTNode::SQLUnary` is `Expr::UnaryOp`;
|
||||
- The `op: SQLOperator` field is now either a `BinaryOperator` or an `UnaryOperator`.
|
||||
- Change the representation of JOINs to match the standard (#109): `SQLSelect`'s `relation` and `joins` are replaced with `from: Vec<TableWithJoins>`. Before this change `FROM foo NATURAL JOIN bar, baz` was represented as "foo" as the `relation` followed by two joins (`Inner(Natural)` and `Implicit`); now it's two `TableWithJoins` (`foo NATURAL JOIN bar` and `baz`).
|
||||
- Extract a `SQLFunction` struct (#89)
|
||||
- Replace `Option<Vec<T>>` with `Vec<T>` in the AST structs (#73)
|
||||
- Change `Value::Long()` to be unsigned, use u64 consistently (#65)
|
||||
|
||||
### Added
|
||||
- Infra:
|
||||
- Implement `fmt::Display` on AST nodes (#124) - thanks @vemoo!
|
||||
- Implement `Hash` (#88) and `Eq` (#123) on all AST nodes
|
||||
- Implement `std::error::Error` for `ParserError` (#72)
|
||||
- Handle Windows line-breaks (#54)
|
||||
- Expressions:
|
||||
- Support `INTERVAL` literals (#103)
|
||||
- Support `DATE` / `TIME` / `TIMESTAMP` literals (#99)
|
||||
- Support `EXTRACT` (#96)
|
||||
- Support `X'hex value'` literals (#95)
|
||||
- Support `EXISTS` subqueries (#90)
|
||||
- Support nested expressions in `BETWEEN` (#80)
|
||||
- Support `COUNT(DISTINCT x)` and similar (#77)
|
||||
- Support `CASE operand WHEN expected_value THEN ..` and table-valued functions (#59)
|
||||
- Support analytic (window) functions (`OVER` clause) (#50)
|
||||
- Queries / DML:
|
||||
- Support nested joins (#100) and derived tables with set operations (#111)
|
||||
- Support `UPDATE` statements (#97)
|
||||
- Support `INSERT INTO foo SELECT * FROM bar` and `FROM VALUES (...)` (#91)
|
||||
- Support `SELECT ALL` (#76)
|
||||
- Add `FETCH` and `OFFSET` support, and `LATERAL` (#69) - thanks @thomas-jeepe!
|
||||
- Support `COLLATE`, optional column list in CTEs (#64)
|
||||
- DDL/TCL:
|
||||
- Support `START/SET/COMMIT/ROLLBACK TRANSACTION` (#106) - thanks @SamuelMarks!
|
||||
- Parse column constraints in any order (#93)
|
||||
- Parse `DECIMAL` and `DEC` aliases for `NUMERIC` type (#92)
|
||||
- Support `DROP [TABLE|VIEW]` (#75)
|
||||
- Support arbitrary `WITH` options for `CREATE [TABLE|VIEW]` (#74)
|
||||
- Support constraints in `CREATE TABLE` (#65)
|
||||
- Add basic MSSQL dialect (#61) and some MSSQL-specific features:
|
||||
- `CROSS`/`OUTER APPLY` (#120)
|
||||
- MSSQL identifier and alias parsing rules (#66)
|
||||
- `WITH` hints (#59)
|
||||
|
||||
### Fixed
|
||||
- Report an error for `SELECT * FROM a OUTER JOIN b` instead of parsing `OUTER` as an alias (#118)
|
||||
- Fix the precedence of `NOT LIKE` (#82) and unary `NOT` (#107)
|
||||
- Do not panic when `NOT` is not followed by an expected keyword (#71)
|
||||
successfully instead of returning a parse error - thanks @ivanceras! (#67) - and similar fixes for queries with no `FROM` (#116)
|
||||
- Fix issues with `ALTER TABLE ADD CONSTRAINT` parsing (#65)
|
||||
- Serialize the "not equals" operator as `<>` instead of `!=` (#64)
|
||||
- Remove dependencies on `uuid` (#59) and `chrono` (#61)
|
||||
- Make `SELECT` query with `LIMIT` clause but no `WHERE` parse - Fix incorrect behavior of `ASTNode::SQLQualifiedWildcard::to_string()` (returned `foo*` instead of `foo.*`) - thanks @thomas-jeepe! (#52)
|
||||
|
||||
## [0.3.1] - 2019-04-20
|
||||
### Added
|
||||
- Extended `SQLStatement::SQLCreateTable` to support Hive's EXTERNAL TABLES (`CREATE EXTERNAL TABLE .. STORED AS .. LOCATION '..'`) - thanks @zhzy0077! (#46)
|
||||
- Parse `SELECT DISTINCT` to `SQLSelect::distinct` (#49)
|
||||
|
||||
## [0.3.0] - 2019-04-03
|
||||
### Changed
|
||||
This release includes major changes to the AST structs to add a number of features, as described in #37 and #43. In particular:
|
||||
- `ASTNode` variants that represent statements were extracted from `ASTNode` into a separate `SQLStatement` enum;
|
||||
- `Parser::parse_sql` now returns a `Vec` of parsed statements.
|
||||
- `ASTNode` now represents an expression (renamed to `Expr` in 0.4.0)
|
||||
- The query representation (formerly `ASTNode::SQLSelect`) became more complicated to support:
|
||||
- `WITH` and `UNION`/`EXCEPT`/`INTERSECT` (via `SQLQuery`, `Cte`, and `SQLSetExpr`),
|
||||
- aliases and qualified wildcards in `SELECT` (via `SQLSelectItem`),
|
||||
- and aliases in `FROM`/`JOIN` (via `TableFactor`).
|
||||
- A new `SQLObjectName` struct is used instead of `String` or `ASTNode::SQLCompoundIdentifier` - for objects like tables, custom types, etc.
|
||||
- Added support for "delimited identifiers" and made keywords context-specific (thus accepting them as valid identifiers in most contexts) - **this caused a regression in parsing `SELECT .. FROM .. LIMIT ..` (#67), fixed in 0.4.0**
|
||||
|
||||
### Added
|
||||
Other than the changes listed above, some less intrusive additions include:
|
||||
- Support `CREATE [MATERIALIZED] VIEW` statement
|
||||
- Support `IN`, `BETWEEN`, unary +/- in epressions
|
||||
- Support `CHAR` data type and `NUMERIC` not followed by `(p,s)`.
|
||||
- Support national string literals (`N'...'`)
|
||||
|
||||
## [0.2.4] - 2019-03-08
|
||||
Same as 0.2.2.
|
||||
|
||||
## [0.2.3] - 2019-03-08 [YANKED]
|
||||
|
||||
## [0.2.2] - 2019-03-08
|
||||
### Changed
|
||||
- Removed `Value::String`, `Value::DoubleQuotedString`, and `Token::String`, making
|
||||
- `'...'` parse as a string literal (`Value::SingleQuotedString`), and
|
||||
- `"..."` fail to parse until version 0.3.0 (#36)
|
||||
|
||||
## [0.2.1] - 2019-01-13
|
||||
We don't have a changelog for the changes made in 2018, but thanks to @crw5996, @cswinter, @fredrikroos, @ivanceras, @nickolay, @virattara for their contributions in the early stages of the project!
|
||||
|
||||
## [0.1.0] - 2018-09-03
|
||||
Initial release
|
||||
|
|
49
Cargo.toml
49
Cargo.toml
|
@ -1,34 +1,16 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[package]
|
||||
name = "sqlparser"
|
||||
description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
|
||||
version = "0.57.0"
|
||||
authors = ["Apache DataFusion <dev@datafusion.apache.org>"]
|
||||
homepage = "https://github.com/apache/datafusion-sqlparser-rs"
|
||||
version = "0.24.0"
|
||||
authors = ["Andy Grove <andygrove73@gmail.com>"]
|
||||
homepage = "https://github.com/sqlparser-rs/sqlparser-rs"
|
||||
documentation = "https://docs.rs/sqlparser/"
|
||||
keywords = ["ansi", "sql", "lexer", "parser"]
|
||||
repository = "https://github.com/apache/datafusion-sqlparser-rs"
|
||||
keywords = [ "ansi", "sql", "lexer", "parser" ]
|
||||
repository = "https://github.com/sqlparser-rs/sqlparser-rs"
|
||||
license = "Apache-2.0"
|
||||
include = [
|
||||
"src/**/*.rs",
|
||||
"Cargo.toml",
|
||||
"LICENSE.TXT",
|
||||
]
|
||||
edition = "2021"
|
||||
|
||||
|
@ -37,30 +19,27 @@ name = "sqlparser"
|
|||
path = "src/lib.rs"
|
||||
|
||||
[features]
|
||||
default = ["std", "recursive-protection"]
|
||||
default = ["std"]
|
||||
std = []
|
||||
recursive-protection = ["std", "recursive"]
|
||||
# Enable JSON output in the `cli` example:
|
||||
json_example = ["serde_json", "serde"]
|
||||
visitor = ["sqlparser_derive"]
|
||||
|
||||
[dependencies]
|
||||
bigdecimal = { version = "0.4.1", features = ["serde"], optional = true }
|
||||
bigdecimal = { version = "0.3", features = ["serde"], optional = true }
|
||||
log = "0.4"
|
||||
recursive = { version = "0.1.1", optional = true}
|
||||
|
||||
serde = { version = "1.0", default-features = false, features = ["derive", "alloc"], optional = true }
|
||||
serde = { version = "1.0", features = ["derive"], optional = true }
|
||||
# serde_json is only used in examples/cli, but we have to put it outside
|
||||
# of dev-dependencies because of
|
||||
# https://github.com/rust-lang/cargo/issues/1596
|
||||
serde_json = { version = "1.0", optional = true }
|
||||
sqlparser_derive = { version = "0.3.0", path = "derive", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
simple_logger = "5.0"
|
||||
simple_logger = "2.1"
|
||||
matches = "0.1"
|
||||
pretty_assertions = "1"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
# Document these features on docs.rs
|
||||
features = ["serde", "visitor"]
|
||||
[package.metadata.release]
|
||||
# Instruct `cargo release` to not run `cargo publish` locally:
|
||||
# https://github.com/sunng87/cargo-release/blob/master/docs/reference.md#config-fields
|
||||
# See docs/releasing.md for details.
|
||||
disable-publish = true
|
||||
|
|
23
HEADER
23
HEADER
|
@ -1,16 +1,11 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
184
README.md
184
README.md
|
@ -1,22 +1,3 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Extensible SQL Lexer and Parser for Rust
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
|
@ -25,10 +6,13 @@
|
|||
[](https://coveralls.io/github/sqlparser-rs/sqlparser-rs?branch=main)
|
||||
[](https://gitter.im/sqlparser-rs/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
|
||||
This crate contains a lexer and parser for SQL that conforms with the
|
||||
[ANSI/ISO SQL standard][sql-standard] and other dialects. This crate
|
||||
is used as a foundation for SQL query engines, vendor-specific
|
||||
parsers, and various SQL analysis.
|
||||
The goal of this project is to build a SQL lexer and parser capable of parsing
|
||||
SQL that conforms with the [ANSI/ISO SQL standard][sql-standard] while also
|
||||
making it easy to support custom dialects so that this crate can be used as a
|
||||
foundation for vendor-specific parsers.
|
||||
|
||||
This parser is currently being used by the [DataFusion] query engine,
|
||||
[LocustDB], [Ballista] and [GlueSQL].
|
||||
|
||||
## Example
|
||||
|
||||
|
@ -53,77 +37,13 @@ println!("AST: {:?}", ast);
|
|||
This outputs
|
||||
|
||||
```rust
|
||||
AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name:ObjectName([Identifier(Ident { value: "myfunc", quote_style: None })]), args: [Identifier("b")], filter: None, over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName([Identifier(Ident { value: "table_1", quote_style: None })]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })]
|
||||
AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name: ObjectName(["myfunc"]), args: [Identifier("b")], over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName(["table_1"]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })]
|
||||
```
|
||||
|
||||
|
||||
## Features
|
||||
|
||||
The following optional [crate features](https://doc.rust-lang.org/cargo/reference/features.html) are available:
|
||||
|
||||
* `serde`: Adds [Serde](https://serde.rs/) support by implementing `Serialize` and `Deserialize` for all AST nodes.
|
||||
* `visitor`: Adds a `Visitor` capable of recursively walking the AST tree.
|
||||
* `recursive-protection` (enabled by default), uses [recursive](https://docs.rs/recursive/latest/recursive/) for stack overflow protection.
|
||||
|
||||
## Syntax vs Semantics
|
||||
|
||||
This crate provides only a syntax parser, and tries to avoid applying
|
||||
any SQL semantics, and accepts queries that specific databases would
|
||||
reject, even when using that Database's specific `Dialect`. For
|
||||
example, `CREATE TABLE(x int, x int)` is accepted by this crate, even
|
||||
though most SQL engines will reject this statement due to the repeated
|
||||
column name `x`.
|
||||
|
||||
This crate avoids semantic analysis because it varies drastically
|
||||
between dialects and implementations. If you want to do semantic
|
||||
analysis, feel free to use this project as a base.
|
||||
|
||||
## Preserves Syntax Round Trip
|
||||
|
||||
This crate allows users to recover the original SQL text (with comments removed,
|
||||
normalized whitespace and keyword capitalization), which is useful for tools
|
||||
that analyze and manipulate SQL.
|
||||
|
||||
This means that other than comments, whitespace and the capitalization of
|
||||
keywords, the following should hold true for all SQL:
|
||||
|
||||
```rust
|
||||
// Parse SQL
|
||||
let sql = "SELECT 'hello'";
|
||||
let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
|
||||
|
||||
// The original SQL text can be generated from the AST
|
||||
assert_eq!(ast[0].to_string(), sql);
|
||||
|
||||
// The SQL can also be pretty-printed with newlines and indentation
|
||||
assert_eq!(format!("{:#}", ast[0]), "SELECT\n 'hello'");
|
||||
## Command line
|
||||
To parse a file and dump the results as JSON:
|
||||
```
|
||||
|
||||
There are still some cases in this crate where different SQL with seemingly
|
||||
similar semantics are represented with the same AST. We welcome PRs to fix such
|
||||
issues and distinguish different syntaxes in the AST.
|
||||
|
||||
|
||||
## Source Locations (Work in Progress)
|
||||
|
||||
This crate allows recovering source locations from AST nodes via the [Spanned]
|
||||
trait, which can be used for advanced diagnostics tooling. Note that this
|
||||
feature is a work in progress and many nodes report missing or inaccurate spans.
|
||||
Please see [this ticket] for information on how to contribute missing
|
||||
improvements.
|
||||
|
||||
[Spanned]: https://docs.rs/sqlparser/latest/sqlparser/ast/trait.Spanned.html
|
||||
[this ticket]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548
|
||||
|
||||
```rust
|
||||
// Parse SQL
|
||||
let ast = Parser::parse_sql(&GenericDialect, "SELECT A FROM B").unwrap();
|
||||
|
||||
// The source span can be retrieved with start and end locations
|
||||
assert_eq!(ast[0].span(), Span {
|
||||
start: Location::of(1, 1),
|
||||
end: Location::of(1, 16),
|
||||
});
|
||||
$ cargo run --features json_example --example cli FILENAME.sql [--dialectname]
|
||||
```
|
||||
|
||||
## SQL compliance
|
||||
|
@ -132,9 +52,9 @@ SQL was first standardized in 1987, and revisions of the standard have been
|
|||
published regularly since. Most revisions have added significant new features to
|
||||
the language, and as a result no database claims to support the full breadth of
|
||||
features. This parser currently supports most of the SQL-92 syntax, plus some
|
||||
syntax from newer versions that have been explicitly requested, plus various
|
||||
other dialect-specific syntax. Whenever possible, the [online SQL:2016
|
||||
grammar][sql-2016-grammar] is used to guide what syntax to accept.
|
||||
syntax from newer versions that have been explicitly requested, plus some MSSQL,
|
||||
PostgreSQL, and other dialect-specific syntax. Whenever possible, the [online
|
||||
SQL:2016 grammar][sql-2016-grammar] is used to guide what syntax to accept.
|
||||
|
||||
Unfortunately, stating anything more specific about compliance is difficult.
|
||||
There is no publicly available test suite that can assess compliance
|
||||
|
@ -150,21 +70,10 @@ that are actually used. Note that if you urgently need support for a feature,
|
|||
you will likely need to write the implementation yourself. See the
|
||||
[Contributing](#Contributing) section for details.
|
||||
|
||||
## Command line
|
||||
### Supporting custom SQL dialects
|
||||
|
||||
This crate contains a CLI program that can parse a file and dump the results as JSON:
|
||||
```
|
||||
$ cargo run --features json_example --example cli FILENAME.sql [--dialectname]
|
||||
```
|
||||
|
||||
## Users
|
||||
|
||||
This parser is currently being used by the [DataFusion] query engine, [LocustDB],
|
||||
[Ballista], [GlueSQL], [Opteryx], [Polars], [PRQL], [Qrlew], [JumpWire], [ParadeDB], [CipherStash Proxy],
|
||||
and [GreptimeDB].
|
||||
|
||||
If your project is using sqlparser-rs feel free to make a PR to add it
|
||||
to this list.
|
||||
This is a work in progress, but we have some notes on [writing a custom SQL
|
||||
parser](docs/custom_sql_parser.md).
|
||||
|
||||
## Design
|
||||
|
||||
|
@ -183,44 +92,31 @@ reasons:
|
|||
- It is far easier to extend and make dialect-specific extensions
|
||||
compared to using a parser generator
|
||||
|
||||
### Supporting custom SQL dialects
|
||||
|
||||
This is a work in progress, but we have some notes on [writing a custom SQL
|
||||
parser](docs/custom_sql_parser.md).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are highly encouraged! However, the bandwidth we have to
|
||||
maintain this crate is limited. Please read the following sections carefully.
|
||||
maintain this crate is fairly limited.
|
||||
|
||||
### New Syntax
|
||||
|
||||
The most commonly accepted PRs add support for or fix a bug in a feature in the
|
||||
SQL standard, or a popular RDBMS, such as Microsoft SQL
|
||||
Pull requests that add support for or fix a bug in a feature in the
|
||||
SQL standard, or a feature in a popular RDBMS, like Microsoft SQL
|
||||
Server or PostgreSQL, will likely be accepted after a brief
|
||||
review. Any SQL feature that is dialect specific should be parsed by *both* the relevant [`Dialect`]
|
||||
as well as [`GenericDialect`].
|
||||
|
||||
### Major API Changes
|
||||
review.
|
||||
|
||||
The current maintainers do not plan for any substantial changes to
|
||||
this crate's API. PRs proposing major refactors
|
||||
this crate's API at this time. And thus, PRs proposing major refactors
|
||||
are not likely to be accepted.
|
||||
|
||||
### Testing
|
||||
|
||||
While we hope to review PRs in a reasonably
|
||||
timely fashion, it may take a week or more. In order to speed the process,
|
||||
Please be aware that, while we hope to review PRs in a reasonably
|
||||
timely fashion, it may take a while. In order to speed the process,
|
||||
please make sure the PR passes all CI checks, and includes tests
|
||||
demonstrating your code works as intended (and to avoid
|
||||
regressions). Remember to also test error paths.
|
||||
|
||||
PRs without tests will not be reviewed or merged. Since the CI
|
||||
ensures that `cargo test`, `cargo fmt`, and `cargo clippy`, pass you
|
||||
should likely to run all three commands locally before submitting
|
||||
will likely want to run all three commands locally before submitting
|
||||
your PR.
|
||||
|
||||
### Filing Issues
|
||||
|
||||
If you are unable to submit a patch, feel free to file an issue instead. Please
|
||||
try to include:
|
||||
|
@ -231,27 +127,11 @@ try to include:
|
|||
* links to documentation for the feature for a few of the most popular
|
||||
databases that support it.
|
||||
|
||||
Unfortunately, if you need support for a feature, you will likely need to implement
|
||||
it yourself, or file a well enough described ticket that another member of the community can do so.
|
||||
Our goal as maintainers is to facilitate the integration
|
||||
If you need support for a feature, you will likely need to implement
|
||||
it yourself. Our goal as maintainers is to facilitate the integration
|
||||
of various features from various contributors, but not to provide the
|
||||
implementations ourselves, as we simply don't have the resources.
|
||||
|
||||
### Benchmarking
|
||||
|
||||
There are several micro benchmarks in the `sqlparser_bench` directory.
|
||||
You can run them with:
|
||||
|
||||
```
|
||||
git checkout main
|
||||
cd sqlparser_bench
|
||||
cargo bench -- --save-baseline main
|
||||
git checkout <your branch>
|
||||
cargo bench -- --baseline main
|
||||
```
|
||||
|
||||
By adding the `--save-baseline main` and `--baseline main` you can track the
|
||||
progress of your improvements as you continue working on the feature branch.
|
||||
|
||||
## Licensing
|
||||
|
||||
|
@ -269,16 +149,6 @@ licensed as above, without any additional terms or conditions.
|
|||
[LocustDB]: https://github.com/cswinter/LocustDB
|
||||
[Ballista]: https://github.com/apache/arrow-ballista
|
||||
[GlueSQL]: https://github.com/gluesql/gluesql
|
||||
[Opteryx]: https://github.com/mabel-dev/opteryx
|
||||
[Polars]: https://pola.rs/
|
||||
[PRQL]: https://github.com/PRQL/prql
|
||||
[Qrlew]: https://github.com/Qrlew/qrlew
|
||||
[JumpWire]: https://github.com/extragoodlabs/jumpwire
|
||||
[ParadeDB]: https://github.com/paradedb/paradedb
|
||||
[Pratt Parser]: https://tdop.github.io/
|
||||
[sql-2016-grammar]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html
|
||||
[sql-standard]: https://en.wikipedia.org/wiki/ISO/IEC_9075
|
||||
[`Dialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/trait.Dialect.html
|
||||
[`GenericDialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/struct.GenericDialect.html
|
||||
[CipherStash Proxy]: https://github.com/cipherstash/proxy
|
||||
[GreptimeDB]: https://github.com/GreptimeTeam/greptimedb
|
||||
|
|
19
SECURITY.md
19
SECURITY.md
|
@ -1,22 +1,3 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Security Policy
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,104 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.52.0 Changelog
|
||||
|
||||
This release consists of 45 commits from 20 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- feat: support explain options [#1426](https://github.com/apache/datafusion-sqlparser-rs/pull/1426) (kysshsy)
|
||||
- feat: adding Display implementation to DELETE and INSERT [#1427](https://github.com/apache/datafusion-sqlparser-rs/pull/1427) (seve-martinez)
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- fix: `maybe_parse` preventing parser from erroring on recursion limit [#1464](https://github.com/apache/datafusion-sqlparser-rs/pull/1464) (tomershaniii)
|
||||
|
||||
**Other:**
|
||||
|
||||
- Fix parsing of negative values [#1419](https://github.com/apache/datafusion-sqlparser-rs/pull/1419) (agscpp)
|
||||
- Allow to use ON CLUSTER cluster_name in TRUNCATE syntax [#1428](https://github.com/apache/datafusion-sqlparser-rs/pull/1428) (git-hulk)
|
||||
- chore: remove redundant punctuation [#1434](https://github.com/apache/datafusion-sqlparser-rs/pull/1434) (Fischer0522)
|
||||
- MS SQL Server: add support for IDENTITY column option [#1432](https://github.com/apache/datafusion-sqlparser-rs/pull/1432) (7phs)
|
||||
- Update to ASF header / add when missing [#1437](https://github.com/apache/datafusion-sqlparser-rs/pull/1437) (alamb)
|
||||
- Some small optimizations [#1424](https://github.com/apache/datafusion-sqlparser-rs/pull/1424) (exrok)
|
||||
- Fix `codestyle` CI check [#1438](https://github.com/apache/datafusion-sqlparser-rs/pull/1438) (alamb)
|
||||
- Implements CREATE POLICY syntax for PostgreSQL [#1440](https://github.com/apache/datafusion-sqlparser-rs/pull/1440) (git-hulk)
|
||||
- make `parse_expr_with_alias` public [#1444](https://github.com/apache/datafusion-sqlparser-rs/pull/1444) (Eason0729)
|
||||
- Implements DROP POLICY syntax for PostgreSQL [#1445](https://github.com/apache/datafusion-sqlparser-rs/pull/1445) (git-hulk)
|
||||
- Support `DROP DATABASE` [#1443](https://github.com/apache/datafusion-sqlparser-rs/pull/1443) (linhr)
|
||||
- Implements ALTER POLICY syntax for PostgreSQL [#1446](https://github.com/apache/datafusion-sqlparser-rs/pull/1446) (git-hulk)
|
||||
- Add a note discouraging new use of `dialect_of` macro [#1448](https://github.com/apache/datafusion-sqlparser-rs/pull/1448) (alamb)
|
||||
- Expand handling of `LIMIT 1, 2` handling to include sqlite [#1447](https://github.com/apache/datafusion-sqlparser-rs/pull/1447) (joshuawarner32)
|
||||
- Fix always uses CommentDef::WithoutEq while parsing the inline comment [#1453](https://github.com/apache/datafusion-sqlparser-rs/pull/1453) (git-hulk)
|
||||
- Add support for the LIKE ANY and ILIKE ANY pattern-matching condition [#1456](https://github.com/apache/datafusion-sqlparser-rs/pull/1456) (yoavcloud)
|
||||
- added ability to parse extension to parse_comment inside postgres dialect [#1451](https://github.com/apache/datafusion-sqlparser-rs/pull/1451) (MaxwellKnight)
|
||||
- Snowflake: support of views column comment [#1441](https://github.com/apache/datafusion-sqlparser-rs/pull/1441) (7phs)
|
||||
- Add SQLite "ON CONFLICT" column option in CREATE TABLE statements [#1442](https://github.com/apache/datafusion-sqlparser-rs/pull/1442) (nucccc)
|
||||
- Add support for ASC and DESC in CREATE TABLE column constraints for SQLite. [#1462](https://github.com/apache/datafusion-sqlparser-rs/pull/1462) (caldwell)
|
||||
- Add support of `EXPLAIN QUERY PLAN` syntax for SQLite dialect [#1458](https://github.com/apache/datafusion-sqlparser-rs/pull/1458) (git-hulk)
|
||||
- Add "DROP TYPE" support. [#1461](https://github.com/apache/datafusion-sqlparser-rs/pull/1461) (caldwell)
|
||||
- chore: Add asf.yaml [#1463](https://github.com/apache/datafusion-sqlparser-rs/pull/1463) (Xuanwo)
|
||||
- Add support for quantified comparison predicates (ALL/ANY/SOME) [#1459](https://github.com/apache/datafusion-sqlparser-rs/pull/1459) (yoavcloud)
|
||||
- MySQL dialect: Add support for hash comments [#1466](https://github.com/apache/datafusion-sqlparser-rs/pull/1466) (hansott)
|
||||
- Fix #1469 (SET ROLE regression) [#1474](https://github.com/apache/datafusion-sqlparser-rs/pull/1474) (lovasoa)
|
||||
- Add support for parsing MsSql alias with equals [#1467](https://github.com/apache/datafusion-sqlparser-rs/pull/1467) (yoavcloud)
|
||||
- Snowflake: support for extended column options in `CREATE TABLE` [#1454](https://github.com/apache/datafusion-sqlparser-rs/pull/1454) (7phs)
|
||||
- MsSQL TRY_CONVERT [#1477](https://github.com/apache/datafusion-sqlparser-rs/pull/1477) (yoavcloud)
|
||||
- Add PostgreSQL specfic "CREATE TYPE t AS ENUM (...)" support. [#1460](https://github.com/apache/datafusion-sqlparser-rs/pull/1460) (caldwell)
|
||||
- Fix build [#1483](https://github.com/apache/datafusion-sqlparser-rs/pull/1483) (yoavcloud)
|
||||
- Fix complex blocks warning when running clippy [#1488](https://github.com/apache/datafusion-sqlparser-rs/pull/1488) (git-hulk)
|
||||
- Add support for SHOW DATABASES/SCHEMAS/TABLES/VIEWS in Hive [#1487](https://github.com/apache/datafusion-sqlparser-rs/pull/1487) (yoavcloud)
|
||||
- Fix typo in `Dialect::supports_eq_alias_assigment` [#1478](https://github.com/apache/datafusion-sqlparser-rs/pull/1478) (alamb)
|
||||
- Add support for PostgreSQL `LISTEN/NOTIFY` syntax [#1485](https://github.com/apache/datafusion-sqlparser-rs/pull/1485) (wugeer)
|
||||
- Add support for TOP before ALL/DISTINCT [#1495](https://github.com/apache/datafusion-sqlparser-rs/pull/1495) (yoavcloud)
|
||||
- add support for `FOR ORDINALITY` and `NESTED` in JSON_TABLE [#1493](https://github.com/apache/datafusion-sqlparser-rs/pull/1493) (lovasoa)
|
||||
- Add Apache License to additional files [#1502](https://github.com/apache/datafusion-sqlparser-rs/pull/1502) (alamb)
|
||||
- Move CHANGELOG content [#1503](https://github.com/apache/datafusion-sqlparser-rs/pull/1503) (alamb)
|
||||
- improve support for T-SQL EXECUTE statements [#1490](https://github.com/apache/datafusion-sqlparser-rs/pull/1490) (lovasoa)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
8 Andrew Lamb
|
||||
7 Yoav Cohen
|
||||
7 hulk
|
||||
3 Aleksei Piianin
|
||||
3 David Caldwell
|
||||
3 Ophir LOJKINE
|
||||
1 Agaev Guseyn
|
||||
1 Eason
|
||||
1 Fischer
|
||||
1 Hans Ott
|
||||
1 Heran Lin
|
||||
1 Joshua Warner
|
||||
1 Maxwell Knight
|
||||
1 Seve Martinez
|
||||
1 Siyuan Huang
|
||||
1 Thomas Dagenais
|
||||
1 Xuanwo
|
||||
1 nucccc
|
||||
1 tomershaniii
|
||||
1 wugeer
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,95 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.53.0 Changelog
|
||||
|
||||
This release consists of 47 commits from 16 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
**Other:**
|
||||
|
||||
- hive: support for special not expression `!a` and raise error for `a!` factorial operator [#1472](https://github.com/apache/datafusion-sqlparser-rs/pull/1472) (wugeer)
|
||||
- Add support for MSSQL's `OPENJSON WITH` clause [#1498](https://github.com/apache/datafusion-sqlparser-rs/pull/1498) (gaoqiangz)
|
||||
- Parse true and false as identifiers in mssql [#1510](https://github.com/apache/datafusion-sqlparser-rs/pull/1510) (lovasoa)
|
||||
- Fix the parsing error in MSSQL for multiple statements that include `DECLARE` statements [#1497](https://github.com/apache/datafusion-sqlparser-rs/pull/1497) (wugeer)
|
||||
- Add support for Snowflake SHOW DATABASES/SCHEMAS/TABLES/VIEWS/COLUMNS statements [#1501](https://github.com/apache/datafusion-sqlparser-rs/pull/1501) (yoavcloud)
|
||||
- Add support of COMMENT ON syntax for Snowflake [#1516](https://github.com/apache/datafusion-sqlparser-rs/pull/1516) (git-hulk)
|
||||
- Add support for MYSQL's `CREATE TABLE SELECT` expr [#1515](https://github.com/apache/datafusion-sqlparser-rs/pull/1515) (wugeer)
|
||||
- Add support for MSSQL's `XQuery` methods [#1500](https://github.com/apache/datafusion-sqlparser-rs/pull/1500) (gaoqiangz)
|
||||
- Add support for Hive's `LOAD DATA` expr [#1520](https://github.com/apache/datafusion-sqlparser-rs/pull/1520) (wugeer)
|
||||
- Fix ClickHouse document link from `Russian` to `English` [#1527](https://github.com/apache/datafusion-sqlparser-rs/pull/1527) (git-hulk)
|
||||
- Support ANTI and SEMI joins without LEFT/RIGHT [#1528](https://github.com/apache/datafusion-sqlparser-rs/pull/1528) (delamarch3)
|
||||
- support sqlite's OR clauses in update statements [#1530](https://github.com/apache/datafusion-sqlparser-rs/pull/1530) (lovasoa)
|
||||
- support column type definitions in table aliases [#1526](https://github.com/apache/datafusion-sqlparser-rs/pull/1526) (lovasoa)
|
||||
- Add support for MSSQL's `JSON_ARRAY`/`JSON_OBJECT` expr [#1507](https://github.com/apache/datafusion-sqlparser-rs/pull/1507) (gaoqiangz)
|
||||
- Add support for PostgreSQL `UNLISTEN` syntax and Add support for Postgres `LOAD extension` expr [#1531](https://github.com/apache/datafusion-sqlparser-rs/pull/1531) (wugeer)
|
||||
- Parse byte/bit string literals in MySQL and Postgres [#1532](https://github.com/apache/datafusion-sqlparser-rs/pull/1532) (mvzink)
|
||||
- Allow example CLI to read from stdin [#1536](https://github.com/apache/datafusion-sqlparser-rs/pull/1536) (mvzink)
|
||||
- recursive select calls are parsed with bad trailing_commas parameter [#1521](https://github.com/apache/datafusion-sqlparser-rs/pull/1521) (tomershaniii)
|
||||
- PartiQL queries in Redshift [#1534](https://github.com/apache/datafusion-sqlparser-rs/pull/1534) (yoavcloud)
|
||||
- Include license file in sqlparser_derive crate [#1543](https://github.com/apache/datafusion-sqlparser-rs/pull/1543) (ankane)
|
||||
- Fallback to identifier parsing if expression parsing fails [#1513](https://github.com/apache/datafusion-sqlparser-rs/pull/1513) (yoavcloud)
|
||||
- support `json_object('k':'v')` in postgres [#1546](https://github.com/apache/datafusion-sqlparser-rs/pull/1546) (lovasoa)
|
||||
- Document micro benchmarks [#1555](https://github.com/apache/datafusion-sqlparser-rs/pull/1555) (alamb)
|
||||
- Implement `Spanned` to retrieve source locations on AST nodes [#1435](https://github.com/apache/datafusion-sqlparser-rs/pull/1435) (Nyrox)
|
||||
- Fix error in benchmark queries [#1560](https://github.com/apache/datafusion-sqlparser-rs/pull/1560) (alamb)
|
||||
- Fix clippy warnings on rust 1.83 [#1570](https://github.com/apache/datafusion-sqlparser-rs/pull/1570) (iffyio)
|
||||
- Support relation visitor to visit the `Option` field [#1556](https://github.com/apache/datafusion-sqlparser-rs/pull/1556) (goldmedal)
|
||||
- Rename `TokenWithLocation` to `TokenWithSpan`, in backwards compatible way [#1562](https://github.com/apache/datafusion-sqlparser-rs/pull/1562) (alamb)
|
||||
- Support MySQL size variants for BLOB and TEXT columns [#1564](https://github.com/apache/datafusion-sqlparser-rs/pull/1564) (mvzink)
|
||||
- Increase version of sqlparser_derive from 0.2.2 to 0.3.0 [#1571](https://github.com/apache/datafusion-sqlparser-rs/pull/1571) (alamb)
|
||||
- `json_object('k' VALUE 'v')` in postgres [#1547](https://github.com/apache/datafusion-sqlparser-rs/pull/1547) (lovasoa)
|
||||
- Support snowflake double dot notation for object name [#1540](https://github.com/apache/datafusion-sqlparser-rs/pull/1540) (ayman-sigma)
|
||||
- Update comments / docs for `Spanned` [#1549](https://github.com/apache/datafusion-sqlparser-rs/pull/1549) (alamb)
|
||||
- Support Databricks struct literal [#1542](https://github.com/apache/datafusion-sqlparser-rs/pull/1542) (ayman-sigma)
|
||||
- Encapsulate CreateFunction [#1573](https://github.com/apache/datafusion-sqlparser-rs/pull/1573) (philipcristiano)
|
||||
- Support BIT column types [#1577](https://github.com/apache/datafusion-sqlparser-rs/pull/1577) (mvzink)
|
||||
- Support parsing optional nulls handling for unique constraint [#1567](https://github.com/apache/datafusion-sqlparser-rs/pull/1567) (mvzink)
|
||||
- Fix displaying WORK or TRANSACTION after BEGIN [#1565](https://github.com/apache/datafusion-sqlparser-rs/pull/1565) (mvzink)
|
||||
- Add support of the ENUM8|ENUM16 for ClickHouse dialect [#1574](https://github.com/apache/datafusion-sqlparser-rs/pull/1574) (git-hulk)
|
||||
- Parse Snowflake USE ROLE and USE SECONDARY ROLES [#1578](https://github.com/apache/datafusion-sqlparser-rs/pull/1578) (yoavcloud)
|
||||
- Snowflake ALTER TABLE clustering options [#1579](https://github.com/apache/datafusion-sqlparser-rs/pull/1579) (yoavcloud)
|
||||
- Support INSERT OVERWRITE INTO syntax [#1584](https://github.com/apache/datafusion-sqlparser-rs/pull/1584) (yuval-illumex)
|
||||
- Parse `INSERT` with subquery when lacking column names [#1586](https://github.com/apache/datafusion-sqlparser-rs/pull/1586) (iffyio)
|
||||
- Add support for ODBC functions [#1585](https://github.com/apache/datafusion-sqlparser-rs/pull/1585) (iffyio)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
8 Andrew Lamb
|
||||
6 Michael Victor Zink
|
||||
5 Ophir LOJKINE
|
||||
5 Yoav Cohen
|
||||
5 wugeer
|
||||
3 Ifeanyi Ubah
|
||||
3 gaoqiangz
|
||||
3 hulk
|
||||
2 Ayman Elkfrawy
|
||||
1 Andrew Kane
|
||||
1 Jax Liu
|
||||
1 Mark-Oliver Junge
|
||||
1 Philip Cristiano
|
||||
1 Yuval Shkolar
|
||||
1 delamarch3
|
||||
1 tomershaniii
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,118 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.54.0 Changelog
|
||||
|
||||
This release consists of 57 commits from 24 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- feat: support `INSERT INTO [TABLE] FUNCTION` of Clickhouse [#1633](https://github.com/apache/datafusion-sqlparser-rs/pull/1633) (byte-sourcerer)
|
||||
|
||||
**Other:**
|
||||
|
||||
- Run cargo fmt on `derive` crate [#1595](https://github.com/apache/datafusion-sqlparser-rs/pull/1595) (alamb)
|
||||
- Add Apache license header to spans.rs [#1594](https://github.com/apache/datafusion-sqlparser-rs/pull/1594) (alamb)
|
||||
- Add support for BigQuery `ANY TYPE` data type [#1602](https://github.com/apache/datafusion-sqlparser-rs/pull/1602) (MartinSahlen)
|
||||
- Add support for TABLESAMPLE [#1580](https://github.com/apache/datafusion-sqlparser-rs/pull/1580) (yoavcloud)
|
||||
- Redshift: Fix parsing for quoted numbered columns [#1576](https://github.com/apache/datafusion-sqlparser-rs/pull/1576) (7phs)
|
||||
- Add the alter table ON COMMIT option to Snowflake [#1606](https://github.com/apache/datafusion-sqlparser-rs/pull/1606) (yoavcloud)
|
||||
- Support parsing `EXPLAIN ESTIMATE` of Clickhouse [#1605](https://github.com/apache/datafusion-sqlparser-rs/pull/1605) (byte-sourcerer)
|
||||
- Fix BigQuery hyphenated ObjectName with numbers [#1598](https://github.com/apache/datafusion-sqlparser-rs/pull/1598) (ayman-sigma)
|
||||
- Fix test compilation issue [#1609](https://github.com/apache/datafusion-sqlparser-rs/pull/1609) (iffyio)
|
||||
- Allow foreign table constraint without columns [#1608](https://github.com/apache/datafusion-sqlparser-rs/pull/1608) (ramnivas)
|
||||
- Support optional table for `ANALYZE` statement [#1599](https://github.com/apache/datafusion-sqlparser-rs/pull/1599) (yuyang-ok)
|
||||
- Support DOUBLE data types with precision for Mysql [#1611](https://github.com/apache/datafusion-sqlparser-rs/pull/1611) (artorias1024)
|
||||
- Add `#[recursive]` [#1522](https://github.com/apache/datafusion-sqlparser-rs/pull/1522) (blaginin)
|
||||
- Support arbitrary composite access expressions [#1600](https://github.com/apache/datafusion-sqlparser-rs/pull/1600) (ayman-sigma)
|
||||
- Consolidate `MapAccess`, and `Subscript` into `CompoundExpr` to handle the complex field access chain [#1551](https://github.com/apache/datafusion-sqlparser-rs/pull/1551) (goldmedal)
|
||||
- Handle empty projection in Postgres SELECT statements [#1613](https://github.com/apache/datafusion-sqlparser-rs/pull/1613) (tobyhede)
|
||||
- Merge composite and compound expr test cases [#1615](https://github.com/apache/datafusion-sqlparser-rs/pull/1615) (iffyio)
|
||||
- Support Snowflake Update-From-Select [#1604](https://github.com/apache/datafusion-sqlparser-rs/pull/1604) (yuval-illumex)
|
||||
- Improve parsing performance by reducing token cloning [#1587](https://github.com/apache/datafusion-sqlparser-rs/pull/1587) (davisp)
|
||||
- Improve Parser documentation [#1617](https://github.com/apache/datafusion-sqlparser-rs/pull/1617) (alamb)
|
||||
- Add support for DROP EXTENSION [#1610](https://github.com/apache/datafusion-sqlparser-rs/pull/1610) (ramnivas)
|
||||
- Refactor advancing token to avoid duplication, avoid borrow checker issues [#1618](https://github.com/apache/datafusion-sqlparser-rs/pull/1618) (alamb)
|
||||
- Fix the parsing result for the special double number [#1621](https://github.com/apache/datafusion-sqlparser-rs/pull/1621) (goldmedal)
|
||||
- SQLite: Allow dollar signs in placeholder names [#1620](https://github.com/apache/datafusion-sqlparser-rs/pull/1620) (hansott)
|
||||
- Improve error for an unexpected token after DROP [#1623](https://github.com/apache/datafusion-sqlparser-rs/pull/1623) (ramnivas)
|
||||
- Fix `sqlparser_bench` benchmark compilation [#1625](https://github.com/apache/datafusion-sqlparser-rs/pull/1625) (alamb)
|
||||
- Improve parsing speed by avoiding some clones in parse_identifier [#1624](https://github.com/apache/datafusion-sqlparser-rs/pull/1624) (alamb)
|
||||
- Simplify `parse_keyword_apis` more [#1626](https://github.com/apache/datafusion-sqlparser-rs/pull/1626) (alamb)
|
||||
- Test benchmarks and Improve benchmark README.md [#1627](https://github.com/apache/datafusion-sqlparser-rs/pull/1627) (alamb)
|
||||
- Add support for MYSQL's `RENAME TABLE` [#1616](https://github.com/apache/datafusion-sqlparser-rs/pull/1616) (wugeer)
|
||||
- Correctly tokenize nested comments [#1629](https://github.com/apache/datafusion-sqlparser-rs/pull/1629) (hansott)
|
||||
- Add support for USE SECONDARY ROLE (vs. ROLES) [#1637](https://github.com/apache/datafusion-sqlparser-rs/pull/1637) (yoavcloud)
|
||||
- Add support for various Snowflake grantees [#1640](https://github.com/apache/datafusion-sqlparser-rs/pull/1640) (yoavcloud)
|
||||
- Add support for the SQL OVERLAPS predicate [#1638](https://github.com/apache/datafusion-sqlparser-rs/pull/1638) (yoavcloud)
|
||||
- Add support for Snowflake LIST and REMOVE [#1639](https://github.com/apache/datafusion-sqlparser-rs/pull/1639) (yoavcloud)
|
||||
- Add support for MySQL's INSERT INTO ... SET syntax [#1641](https://github.com/apache/datafusion-sqlparser-rs/pull/1641) (yoavcloud)
|
||||
- Start new line if \r in Postgres dialect [#1647](https://github.com/apache/datafusion-sqlparser-rs/pull/1647) (hansott)
|
||||
- Support pluralized time units [#1630](https://github.com/apache/datafusion-sqlparser-rs/pull/1630) (wugeer)
|
||||
- Replace `ReferentialAction` enum in `DROP` statements [#1648](https://github.com/apache/datafusion-sqlparser-rs/pull/1648) (stepancheg)
|
||||
- Add support for MS-SQL BEGIN/END TRY/CATCH [#1649](https://github.com/apache/datafusion-sqlparser-rs/pull/1649) (yoavcloud)
|
||||
- Fix MySQL parsing of GRANT, REVOKE, and CREATE VIEW [#1538](https://github.com/apache/datafusion-sqlparser-rs/pull/1538) (mvzink)
|
||||
- Add support for the Snowflake MINUS set operator [#1652](https://github.com/apache/datafusion-sqlparser-rs/pull/1652) (yoavcloud)
|
||||
- ALTER TABLE DROP {COLUMN|CONSTRAINT} RESTRICT [#1651](https://github.com/apache/datafusion-sqlparser-rs/pull/1651) (stepancheg)
|
||||
- Add support for ClickHouse `FORMAT` on `INSERT` [#1628](https://github.com/apache/datafusion-sqlparser-rs/pull/1628) (bombsimon)
|
||||
- MsSQL SET for session params [#1646](https://github.com/apache/datafusion-sqlparser-rs/pull/1646) (yoavcloud)
|
||||
- Correctly look for end delimiter dollar quoted string [#1650](https://github.com/apache/datafusion-sqlparser-rs/pull/1650) (hansott)
|
||||
- Support single line comments starting with '#' for Hive [#1654](https://github.com/apache/datafusion-sqlparser-rs/pull/1654) (wugeer)
|
||||
- Support trailing commas in `FROM` clause [#1645](https://github.com/apache/datafusion-sqlparser-rs/pull/1645) (barsela1)
|
||||
- Allow empty options for BigQuery [#1657](https://github.com/apache/datafusion-sqlparser-rs/pull/1657) (MartinSahlen)
|
||||
- Add support for parsing RAISERROR [#1656](https://github.com/apache/datafusion-sqlparser-rs/pull/1656) (AvivDavid-Satori)
|
||||
- Add support for Snowflake column aliases that use SQL keywords [#1632](https://github.com/apache/datafusion-sqlparser-rs/pull/1632) (yoavcloud)
|
||||
- fix parsing of `INSERT INTO ... SELECT ... RETURNING ` [#1661](https://github.com/apache/datafusion-sqlparser-rs/pull/1661) (lovasoa)
|
||||
- Add support for `IS [NOT] [form] NORMALIZED` [#1655](https://github.com/apache/datafusion-sqlparser-rs/pull/1655) (alexander-beedie)
|
||||
- Add support for qualified column names in JOIN ... USING [#1663](https://github.com/apache/datafusion-sqlparser-rs/pull/1663) (yoavcloud)
|
||||
- Add support for Snowflake AT/BEFORE [#1667](https://github.com/apache/datafusion-sqlparser-rs/pull/1667) (yoavcloud)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
13 Yoav Cohen
|
||||
9 Andrew Lamb
|
||||
4 Hans Ott
|
||||
3 Ramnivas Laddad
|
||||
3 wugeer
|
||||
2 Ayman Elkfrawy
|
||||
2 Ifeanyi Ubah
|
||||
2 Jax Liu
|
||||
2 Martin Abelson Sahlen
|
||||
2 Stepan Koltsov
|
||||
2 cjw
|
||||
1 Aleksei Piianin
|
||||
1 Alexander Beedie
|
||||
1 AvivDavid-Satori
|
||||
1 Dmitrii Blaginin
|
||||
1 Michael Victor Zink
|
||||
1 Ophir LOJKINE
|
||||
1 Paul J. Davis
|
||||
1 Simon Sawert
|
||||
1 Toby Hede
|
||||
1 Yuval Shkolar
|
||||
1 artorias1024
|
||||
1 bar sela
|
||||
1 yuyang
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,173 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.55.0 Changelog
|
||||
|
||||
This release consists of 55 commits from 25 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
## Migrating usages of `Expr::Value`
|
||||
|
||||
In v0.55 of sqlparser the `Expr::Value` enum variant contains a `ValueWithSpan` instead of a `Value`. Here is how to migrate.
|
||||
|
||||
### When pattern matching
|
||||
|
||||
```diff
|
||||
- Expr::Value(Value::SingleQuotedString(my_string)) => { ... }
|
||||
+ Expr::Value(ValueWithSpan{ value: Value::SingleQuotedString(my_string), span: _ }) => { ... }
|
||||
```
|
||||
|
||||
### When creating an `Expr`
|
||||
|
||||
Use the new `Expr::value` method (notice the lowercase `v`), which will create a `ValueWithSpan` containing an empty span:
|
||||
|
||||
```diff
|
||||
- Expr::Value(Value::SingleQuotedString(my_string))
|
||||
+ Expr::value(Value::SingleQuotedString(my_string))
|
||||
```
|
||||
|
||||
## Migrating usages of `ObjectName`
|
||||
|
||||
In v0.55 of sqlparser, the `ObjectName` structure has been changed as shown below. Here is now to migrate.
|
||||
|
||||
```diff
|
||||
- pub struct ObjectName(pub Vec<Ident>);
|
||||
+ pub struct ObjectName(pub Vec<ObjectNamePart>)
|
||||
```
|
||||
|
||||
### When constructing `ObjectName`
|
||||
|
||||
Use the `From` impl:
|
||||
|
||||
```diff
|
||||
- name: ObjectName(vec![Ident::new("f")]),
|
||||
+ name: ObjectName::from(vec![Ident::new("f")]),
|
||||
```
|
||||
|
||||
### Accessing Spans
|
||||
|
||||
Use the `span()` function
|
||||
|
||||
```diff
|
||||
- name.span
|
||||
+ name.span()
|
||||
```
|
||||
|
||||
|
||||
|
||||
**Breaking changes:**
|
||||
|
||||
- Enhance object name path segments [#1539](https://github.com/apache/datafusion-sqlparser-rs/pull/1539) (ayman-sigma)
|
||||
- Store spans for Value expressions [#1738](https://github.com/apache/datafusion-sqlparser-rs/pull/1738) (lovasoa)
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- feat: adjust create and drop trigger for mysql dialect [#1734](https://github.com/apache/datafusion-sqlparser-rs/pull/1734) (invm)
|
||||
|
||||
**Fixed bugs:**
|
||||
|
||||
- fix: make `serde` feature no_std [#1730](https://github.com/apache/datafusion-sqlparser-rs/pull/1730) (iajoiner)
|
||||
|
||||
**Other:**
|
||||
|
||||
- Update rat_exclude_file.txt [#1670](https://github.com/apache/datafusion-sqlparser-rs/pull/1670) (alamb)
|
||||
- Add support for Snowflake account privileges [#1666](https://github.com/apache/datafusion-sqlparser-rs/pull/1666) (yoavcloud)
|
||||
- Add support for Create Iceberg Table statement for Snowflake parser [#1664](https://github.com/apache/datafusion-sqlparser-rs/pull/1664) (Vedin)
|
||||
- National strings: check if dialect supports backslash escape [#1672](https://github.com/apache/datafusion-sqlparser-rs/pull/1672) (hansott)
|
||||
- Only support escape literals for Postgres, Redshift and generic dialect [#1674](https://github.com/apache/datafusion-sqlparser-rs/pull/1674) (hansott)
|
||||
- BigQuery: Support trailing commas in column definitions list [#1682](https://github.com/apache/datafusion-sqlparser-rs/pull/1682) (iffyio)
|
||||
- Enable GROUP BY exp for Snowflake dialect [#1683](https://github.com/apache/datafusion-sqlparser-rs/pull/1683) (yoavcloud)
|
||||
- Add support for parsing empty dictionary expressions [#1684](https://github.com/apache/datafusion-sqlparser-rs/pull/1684) (yoavcloud)
|
||||
- Support multiple tables in `UPDATE FROM` clause [#1681](https://github.com/apache/datafusion-sqlparser-rs/pull/1681) (iffyio)
|
||||
- Add support for mysql table hints [#1675](https://github.com/apache/datafusion-sqlparser-rs/pull/1675) (AvivDavid-Satori)
|
||||
- BigQuery: Add support for select expr star [#1680](https://github.com/apache/datafusion-sqlparser-rs/pull/1680) (iffyio)
|
||||
- Support underscore separators in numbers for Clickhouse. Fixes #1659 [#1677](https://github.com/apache/datafusion-sqlparser-rs/pull/1677) (graup)
|
||||
- BigQuery: Fix column identifier reserved keywords list [#1678](https://github.com/apache/datafusion-sqlparser-rs/pull/1678) (iffyio)
|
||||
- Fix bug when parsing a Snowflake stage with `;` suffix [#1688](https://github.com/apache/datafusion-sqlparser-rs/pull/1688) (yoavcloud)
|
||||
- Allow plain JOIN without turning it into INNER [#1692](https://github.com/apache/datafusion-sqlparser-rs/pull/1692) (mvzink)
|
||||
- Fix DDL generation in case of an empty arguments function. [#1690](https://github.com/apache/datafusion-sqlparser-rs/pull/1690) (remysaissy)
|
||||
- Fix `CREATE FUNCTION` round trip for Hive dialect [#1693](https://github.com/apache/datafusion-sqlparser-rs/pull/1693) (iffyio)
|
||||
- Make numeric literal underscore test dialect agnostic [#1685](https://github.com/apache/datafusion-sqlparser-rs/pull/1685) (iffyio)
|
||||
- Extend lambda support for ClickHouse and DuckDB dialects [#1686](https://github.com/apache/datafusion-sqlparser-rs/pull/1686) (gstvg)
|
||||
- Make TypedString preserve quote style [#1679](https://github.com/apache/datafusion-sqlparser-rs/pull/1679) (graup)
|
||||
- Do not parse ASOF and MATCH_CONDITION as table factor aliases [#1698](https://github.com/apache/datafusion-sqlparser-rs/pull/1698) (yoavcloud)
|
||||
- Add support for GRANT on some common Snowflake objects [#1699](https://github.com/apache/datafusion-sqlparser-rs/pull/1699) (yoavcloud)
|
||||
- Add RETURNS TABLE() support for CREATE FUNCTION in Postgresql [#1687](https://github.com/apache/datafusion-sqlparser-rs/pull/1687) (remysaissy)
|
||||
- Add parsing for GRANT ROLE and GRANT DATABASE ROLE in Snowflake dialect [#1689](https://github.com/apache/datafusion-sqlparser-rs/pull/1689) (yoavcloud)
|
||||
- Add support for `CREATE/ALTER/DROP CONNECTOR` syntax [#1701](https://github.com/apache/datafusion-sqlparser-rs/pull/1701) (wugeer)
|
||||
- Parse Snowflake COPY INTO <location> [#1669](https://github.com/apache/datafusion-sqlparser-rs/pull/1669) (yoavcloud)
|
||||
- Require space after -- to start single line comment in MySQL [#1705](https://github.com/apache/datafusion-sqlparser-rs/pull/1705) (hansott)
|
||||
- Add suppport for Show Objects statement for the Snowflake parser [#1702](https://github.com/apache/datafusion-sqlparser-rs/pull/1702) (DanCodedThis)
|
||||
- Fix incorrect parsing of JsonAccess bracket notation after cast in Snowflake [#1708](https://github.com/apache/datafusion-sqlparser-rs/pull/1708) (yoavcloud)
|
||||
- Parse Postgres VARBIT datatype [#1703](https://github.com/apache/datafusion-sqlparser-rs/pull/1703) (mvzink)
|
||||
- Implement FROM-first selects [#1713](https://github.com/apache/datafusion-sqlparser-rs/pull/1713) (mitsuhiko)
|
||||
- Enable custom dialects to support `MATCH() AGAINST()` [#1719](https://github.com/apache/datafusion-sqlparser-rs/pull/1719) (joocer)
|
||||
- Support group by cube/rollup etc in BigQuery [#1720](https://github.com/apache/datafusion-sqlparser-rs/pull/1720) (Groennbeck)
|
||||
- Add support for MS Varbinary(MAX) (#1714) [#1715](https://github.com/apache/datafusion-sqlparser-rs/pull/1715) (TylerBrinks)
|
||||
- Add supports for Hive's `SELECT ... GROUP BY .. GROUPING SETS` syntax [#1653](https://github.com/apache/datafusion-sqlparser-rs/pull/1653) (wugeer)
|
||||
- Differentiate LEFT JOIN from LEFT OUTER JOIN [#1726](https://github.com/apache/datafusion-sqlparser-rs/pull/1726) (mvzink)
|
||||
- Add support for Postgres `ALTER TYPE` [#1727](https://github.com/apache/datafusion-sqlparser-rs/pull/1727) (jvatic)
|
||||
- Replace `Method` and `CompositeAccess` with `CompoundFieldAccess` [#1716](https://github.com/apache/datafusion-sqlparser-rs/pull/1716) (iffyio)
|
||||
- Add support for `EXECUTE IMMEDIATE` [#1717](https://github.com/apache/datafusion-sqlparser-rs/pull/1717) (iffyio)
|
||||
- Treat COLLATE like any other column option [#1731](https://github.com/apache/datafusion-sqlparser-rs/pull/1731) (mvzink)
|
||||
- Add support for PostgreSQL/Redshift geometric operators [#1723](https://github.com/apache/datafusion-sqlparser-rs/pull/1723) (benrsatori)
|
||||
- Implement SnowFlake ALTER SESSION [#1712](https://github.com/apache/datafusion-sqlparser-rs/pull/1712) (osipovartem)
|
||||
- Extend Visitor trait for Value type [#1725](https://github.com/apache/datafusion-sqlparser-rs/pull/1725) (tomershaniii)
|
||||
- Add support for `ORDER BY ALL` [#1724](https://github.com/apache/datafusion-sqlparser-rs/pull/1724) (PokIsemaine)
|
||||
- Parse casting to array using double colon operator in Redshift [#1737](https://github.com/apache/datafusion-sqlparser-rs/pull/1737) (yoavcloud)
|
||||
- Replace parallel condition/result vectors with single CaseWhen vector in Expr::Case. This fixes the iteration order when using the `Visitor` trait. Expressions are now visited in the same order as they appear in the sql source. [#1733](https://github.com/apache/datafusion-sqlparser-rs/pull/1733) (lovasoa)
|
||||
- BigQuery: Add support for `BEGIN` [#1718](https://github.com/apache/datafusion-sqlparser-rs/pull/1718) (iffyio)
|
||||
- Parse SIGNED INTEGER type in MySQL CAST [#1739](https://github.com/apache/datafusion-sqlparser-rs/pull/1739) (mvzink)
|
||||
- Parse MySQL ALTER TABLE ALGORITHM option [#1745](https://github.com/apache/datafusion-sqlparser-rs/pull/1745) (mvzink)
|
||||
- Random test cleanups use Expr::value [#1749](https://github.com/apache/datafusion-sqlparser-rs/pull/1749) (alamb)
|
||||
- Parse ALTER TABLE AUTO_INCREMENT operation for MySQL [#1748](https://github.com/apache/datafusion-sqlparser-rs/pull/1748) (mvzink)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
10 Yoav Cohen
|
||||
9 Ifeanyi Ubah
|
||||
7 Michael Victor Zink
|
||||
3 Hans Ott
|
||||
2 Andrew Lamb
|
||||
2 Ophir LOJKINE
|
||||
2 Paul Grau
|
||||
2 Rémy SAISSY
|
||||
2 wugeer
|
||||
1 Armin Ronacher
|
||||
1 Artem Osipov
|
||||
1 AvivDavid-Satori
|
||||
1 Ayman Elkfrawy
|
||||
1 DanCodedThis
|
||||
1 Denys Tsomenko
|
||||
1 Emil
|
||||
1 Ian Alexander Joiner
|
||||
1 Jesse Stuart
|
||||
1 Justin Joyce
|
||||
1 Michael
|
||||
1 SiLe Zhou
|
||||
1 Tyler Brinks
|
||||
1 benrsatori
|
||||
1 gstvg
|
||||
1 tomershaniii
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,102 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.56.0 Changelog
|
||||
|
||||
This release consists of 48 commits from 19 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
**Other:**
|
||||
|
||||
- Ignore escaped LIKE wildcards in MySQL [#1735](https://github.com/apache/datafusion-sqlparser-rs/pull/1735) (mvzink)
|
||||
- Parse SET NAMES syntax in Postgres [#1752](https://github.com/apache/datafusion-sqlparser-rs/pull/1752) (mvzink)
|
||||
- re-add support for nested comments in mssql [#1754](https://github.com/apache/datafusion-sqlparser-rs/pull/1754) (lovasoa)
|
||||
- Extend support for INDEX parsing [#1707](https://github.com/apache/datafusion-sqlparser-rs/pull/1707) (LucaCappelletti94)
|
||||
- Parse MySQL `ALTER TABLE DROP FOREIGN KEY` syntax [#1762](https://github.com/apache/datafusion-sqlparser-rs/pull/1762) (mvzink)
|
||||
- add support for `with` clauses (CTEs) in `delete` statements [#1764](https://github.com/apache/datafusion-sqlparser-rs/pull/1764) (lovasoa)
|
||||
- SET with a list of comma separated assignments [#1757](https://github.com/apache/datafusion-sqlparser-rs/pull/1757) (MohamedAbdeen21)
|
||||
- Preserve MySQL-style `LIMIT <offset>, <limit>` syntax [#1765](https://github.com/apache/datafusion-sqlparser-rs/pull/1765) (mvzink)
|
||||
- Add support for `DROP MATERIALIZED VIEW` [#1743](https://github.com/apache/datafusion-sqlparser-rs/pull/1743) (iffyio)
|
||||
- Add `CASE` and `IF` statement support [#1741](https://github.com/apache/datafusion-sqlparser-rs/pull/1741) (iffyio)
|
||||
- BigQuery: Add support for `CREATE SCHEMA` options [#1742](https://github.com/apache/datafusion-sqlparser-rs/pull/1742) (iffyio)
|
||||
- Snowflake: Support dollar quoted comments [#1755](https://github.com/apache/datafusion-sqlparser-rs/pull/1755)
|
||||
- Add LOCK operation for ALTER TABLE [#1768](https://github.com/apache/datafusion-sqlparser-rs/pull/1768) (MohamedAbdeen21)
|
||||
- Add support for `RAISE` statement [#1766](https://github.com/apache/datafusion-sqlparser-rs/pull/1766) (iffyio)
|
||||
- Add GLOBAL context/modifier to SET statements [#1767](https://github.com/apache/datafusion-sqlparser-rs/pull/1767) (MohamedAbdeen21)
|
||||
- Parse `SUBSTR` as alias for `SUBSTRING` [#1769](https://github.com/apache/datafusion-sqlparser-rs/pull/1769) (mvzink)
|
||||
- SET statements: scope modifier for multiple assignments [#1772](https://github.com/apache/datafusion-sqlparser-rs/pull/1772) (MohamedAbdeen21)
|
||||
- Support qualified column names in `MATCH AGAINST` clause [#1774](https://github.com/apache/datafusion-sqlparser-rs/pull/1774) (tomershaniii)
|
||||
- Mysql: Add support for := operator [#1779](https://github.com/apache/datafusion-sqlparser-rs/pull/1779) (barsela1)
|
||||
- Add cipherstash-proxy to list of users in README.md [#1782](https://github.com/apache/datafusion-sqlparser-rs/pull/1782) (coderdan)
|
||||
- Fix typos [#1785](https://github.com/apache/datafusion-sqlparser-rs/pull/1785) (jayvdb)
|
||||
- Add support for Databricks TIMESTAMP_NTZ. [#1781](https://github.com/apache/datafusion-sqlparser-rs/pull/1781) (romanb)
|
||||
- Enable double-dot-notation for mssql. [#1787](https://github.com/apache/datafusion-sqlparser-rs/pull/1787) (romanb)
|
||||
- Fix: Snowflake ALTER SESSION cannot be followed by other statements. [#1786](https://github.com/apache/datafusion-sqlparser-rs/pull/1786) (romanb)
|
||||
- Add GreptimeDB to the "Users" in README [#1788](https://github.com/apache/datafusion-sqlparser-rs/pull/1788) (MichaelScofield)
|
||||
- Extend snowflake grant options support [#1794](https://github.com/apache/datafusion-sqlparser-rs/pull/1794) (yoavcloud)
|
||||
- Fix clippy lint on rust 1.86 [#1796](https://github.com/apache/datafusion-sqlparser-rs/pull/1796) (iffyio)
|
||||
- Allow single quotes in EXTRACT() for Redshift. [#1795](https://github.com/apache/datafusion-sqlparser-rs/pull/1795) (romanb)
|
||||
- MSSQL: Add support for functionality `MERGE` output clause [#1790](https://github.com/apache/datafusion-sqlparser-rs/pull/1790) (dilovancelik)
|
||||
- Support additional DuckDB integer types such as HUGEINT, UHUGEINT, etc [#1797](https://github.com/apache/datafusion-sqlparser-rs/pull/1797) (alexander-beedie)
|
||||
- Add support for MSSQL IF/ELSE statements. [#1791](https://github.com/apache/datafusion-sqlparser-rs/pull/1791) (romanb)
|
||||
- Allow literal backslash escapes for string literals in Redshift dialect. [#1801](https://github.com/apache/datafusion-sqlparser-rs/pull/1801) (romanb)
|
||||
- Add support for MySQL's STRAIGHT_JOIN join operator. [#1802](https://github.com/apache/datafusion-sqlparser-rs/pull/1802) (romanb)
|
||||
- Snowflake COPY INTO target columns, select items and optional alias [#1805](https://github.com/apache/datafusion-sqlparser-rs/pull/1805) (yoavcloud)
|
||||
- Fix tokenization of qualified identifiers with numeric prefix. [#1803](https://github.com/apache/datafusion-sqlparser-rs/pull/1803) (romanb)
|
||||
- Add support for `INHERITS` option in `CREATE TABLE` statement [#1806](https://github.com/apache/datafusion-sqlparser-rs/pull/1806) (LucaCappelletti94)
|
||||
- Add `DROP TRIGGER` support for SQL Server [#1813](https://github.com/apache/datafusion-sqlparser-rs/pull/1813) (aharpervc)
|
||||
- Snowflake: support nested join without parentheses [#1799](https://github.com/apache/datafusion-sqlparser-rs/pull/1799) (barsela1)
|
||||
- Add support for parenthesized subquery as `IN` predicate [#1793](https://github.com/apache/datafusion-sqlparser-rs/pull/1793) (adamchainz)
|
||||
- Fix `STRAIGHT_JOIN` constraint when table alias is absent [#1812](https://github.com/apache/datafusion-sqlparser-rs/pull/1812) (killertux)
|
||||
- Add support for `PRINT` statement for SQL Server [#1811](https://github.com/apache/datafusion-sqlparser-rs/pull/1811) (aharpervc)
|
||||
- enable `supports_filter_during_aggregation` for Generic dialect [#1815](https://github.com/apache/datafusion-sqlparser-rs/pull/1815) (goldmedal)
|
||||
- Add support for `XMLTABLE` [#1817](https://github.com/apache/datafusion-sqlparser-rs/pull/1817) (lovasoa)
|
||||
- Add `CREATE FUNCTION` support for SQL Server [#1808](https://github.com/apache/datafusion-sqlparser-rs/pull/1808) (aharpervc)
|
||||
- Add `OR ALTER` support for `CREATE VIEW` [#1818](https://github.com/apache/datafusion-sqlparser-rs/pull/1818) (aharpervc)
|
||||
- Add `DECLARE ... CURSOR FOR` support for SQL Server [#1821](https://github.com/apache/datafusion-sqlparser-rs/pull/1821) (aharpervc)
|
||||
- Handle missing login in changelog generate script [#1823](https://github.com/apache/datafusion-sqlparser-rs/pull/1823) (iffyio)
|
||||
- Snowflake: Add support for `CONNECT_BY_ROOT` [#1780](https://github.com/apache/datafusion-sqlparser-rs/pull/1780) (tomershaniii)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
8 Roman Borschel
|
||||
6 Ifeanyi Ubah
|
||||
5 Andrew Harper
|
||||
5 Michael Victor Zink
|
||||
4 Mohamed Abdeen
|
||||
3 Ophir LOJKINE
|
||||
2 Luca Cappelletti
|
||||
2 Yoav Cohen
|
||||
2 bar sela
|
||||
2 tomershaniii
|
||||
1 Adam Johnson
|
||||
1 Aleksei Piianin
|
||||
1 Alexander Beedie
|
||||
1 Bruno Clemente
|
||||
1 Dan Draper
|
||||
1 DilovanCelik
|
||||
1 Jax Liu
|
||||
1 John Vandenberg
|
||||
1 LFC
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,95 +0,0 @@
|
|||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# sqlparser-rs 0.57.0 Changelog
|
||||
|
||||
This release consists of 39 commits from 19 contributors. See credits at the end of this changelog for more information.
|
||||
|
||||
**Implemented enhancements:**
|
||||
|
||||
- feat: Hive: support `SORT BY` direction [#1873](https://github.com/apache/datafusion-sqlparser-rs/pull/1873) (chenkovsky)
|
||||
|
||||
**Other:**
|
||||
|
||||
- Support some of pipe operators [#1759](https://github.com/apache/datafusion-sqlparser-rs/pull/1759) (simonvandel)
|
||||
- Added support for `DROP DOMAIN` [#1828](https://github.com/apache/datafusion-sqlparser-rs/pull/1828) (LucaCappelletti94)
|
||||
- Improve support for cursors for SQL Server [#1831](https://github.com/apache/datafusion-sqlparser-rs/pull/1831) (aharpervc)
|
||||
- Add all missing table options to be handled in any order [#1747](https://github.com/apache/datafusion-sqlparser-rs/pull/1747) (benrsatori)
|
||||
- Add `CREATE TRIGGER` support for SQL Server [#1810](https://github.com/apache/datafusion-sqlparser-rs/pull/1810) (aharpervc)
|
||||
- Added support for `CREATE DOMAIN` [#1830](https://github.com/apache/datafusion-sqlparser-rs/pull/1830) (LucaCappelletti94)
|
||||
- Allow stored procedures to be defined without `BEGIN`/`END` [#1834](https://github.com/apache/datafusion-sqlparser-rs/pull/1834) (aharpervc)
|
||||
- Add support for the MATCH and REGEXP binary operators [#1840](https://github.com/apache/datafusion-sqlparser-rs/pull/1840) (lovasoa)
|
||||
- Fix: parsing ident starting with underscore in certain dialects [#1835](https://github.com/apache/datafusion-sqlparser-rs/pull/1835) (MohamedAbdeen21)
|
||||
- implement pretty-printing with `{:#}` [#1847](https://github.com/apache/datafusion-sqlparser-rs/pull/1847) (lovasoa)
|
||||
- Fix big performance issue in string serialization [#1848](https://github.com/apache/datafusion-sqlparser-rs/pull/1848) (lovasoa)
|
||||
- Add support for `DENY` statements [#1836](https://github.com/apache/datafusion-sqlparser-rs/pull/1836) (aharpervc)
|
||||
- Postgresql: Add `REPLICA IDENTITY` operation for `ALTER TABLE` [#1844](https://github.com/apache/datafusion-sqlparser-rs/pull/1844) (MohamedAbdeen21)
|
||||
- Add support for INCLUDE/EXCLUDE NULLS for UNPIVOT [#1849](https://github.com/apache/datafusion-sqlparser-rs/pull/1849) (Vedin)
|
||||
- pretty print improvements [#1851](https://github.com/apache/datafusion-sqlparser-rs/pull/1851) (lovasoa)
|
||||
- fix new rust 1.87 cargo clippy warnings [#1856](https://github.com/apache/datafusion-sqlparser-rs/pull/1856) (lovasoa)
|
||||
- Update criterion requirement from 0.5 to 0.6 in /sqlparser_bench [#1857](https://github.com/apache/datafusion-sqlparser-rs/pull/1857) (dependabot[bot])
|
||||
- pretty-print CREATE TABLE statements [#1854](https://github.com/apache/datafusion-sqlparser-rs/pull/1854) (lovasoa)
|
||||
- pretty-print CREATE VIEW statements [#1855](https://github.com/apache/datafusion-sqlparser-rs/pull/1855) (lovasoa)
|
||||
- Handle optional datatypes properly in `CREATE FUNCTION` statements [#1826](https://github.com/apache/datafusion-sqlparser-rs/pull/1826) (LucaCappelletti94)
|
||||
- Mysql: Add `SRID` column option [#1852](https://github.com/apache/datafusion-sqlparser-rs/pull/1852) (MohamedAbdeen21)
|
||||
- Add support for table valued functions for SQL Server [#1839](https://github.com/apache/datafusion-sqlparser-rs/pull/1839) (aharpervc)
|
||||
- Keep the COLUMN keyword only if it exists when dropping the column [#1862](https://github.com/apache/datafusion-sqlparser-rs/pull/1862) (git-hulk)
|
||||
- Add support for parameter default values in SQL Server [#1866](https://github.com/apache/datafusion-sqlparser-rs/pull/1866) (aharpervc)
|
||||
- Add support for `TABLESAMPLE` pipe operator [#1860](https://github.com/apache/datafusion-sqlparser-rs/pull/1860) (hendrikmakait)
|
||||
- Adds support for mysql's drop index [#1864](https://github.com/apache/datafusion-sqlparser-rs/pull/1864) (dmzmk)
|
||||
- Fix: GROUPING SETS accept values without parenthesis [#1867](https://github.com/apache/datafusion-sqlparser-rs/pull/1867) (Vedin)
|
||||
- Add ICEBERG keyword support to ALTER TABLE statement [#1869](https://github.com/apache/datafusion-sqlparser-rs/pull/1869) (osipovartem)
|
||||
- MySQL: Support `index_name` in FK constraints [#1871](https://github.com/apache/datafusion-sqlparser-rs/pull/1871) (MohamedAbdeen21)
|
||||
- Postgres: Apply `ONLY` keyword per table in TRUNCATE stmt [#1872](https://github.com/apache/datafusion-sqlparser-rs/pull/1872) (MohamedAbdeen21)
|
||||
- Fix `CASE` expression spans [#1874](https://github.com/apache/datafusion-sqlparser-rs/pull/1874) (eliaperantoni)
|
||||
- MySQL: `[[NOT] ENFORCED]` in CHECK constraint [#1870](https://github.com/apache/datafusion-sqlparser-rs/pull/1870) (MohamedAbdeen21)
|
||||
- Add support for `CREATE SCHEMA WITH ( <properties> )` [#1877](https://github.com/apache/datafusion-sqlparser-rs/pull/1877) (utay)
|
||||
- Add support for `ALTER TABLE DROP INDEX` [#1865](https://github.com/apache/datafusion-sqlparser-rs/pull/1865) (vimko)
|
||||
- chore: Replace archived actions-rs/install action [#1876](https://github.com/apache/datafusion-sqlparser-rs/pull/1876) (assignUser)
|
||||
- Allow `IF NOT EXISTS` after table name for Snowflake [#1881](https://github.com/apache/datafusion-sqlparser-rs/pull/1881) (bombsimon)
|
||||
- Support `DISTINCT AS { STRUCT | VALUE }` for BigQuery [#1880](https://github.com/apache/datafusion-sqlparser-rs/pull/1880) (bombsimon)
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor.
|
||||
|
||||
```
|
||||
7 Ophir LOJKINE
|
||||
6 Andrew Harper
|
||||
6 Mohamed Abdeen
|
||||
3 Luca Cappelletti
|
||||
2 Denys Tsomenko
|
||||
2 Simon Sawert
|
||||
1 Andrew Lamb
|
||||
1 Artem Osipov
|
||||
1 Chen Chongchen
|
||||
1 Dmitriy Mazurin
|
||||
1 Elia Perantoni
|
||||
1 Hendrik Makait
|
||||
1 Jacob Wujciak-Jens
|
||||
1 Simon Vandel Sillesen
|
||||
1 Yannick Utard
|
||||
1 benrsatori
|
||||
1 dependabot[bot]
|
||||
1 hulk
|
||||
1 vimko
|
||||
```
|
||||
|
||||
Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release.
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[package]
|
||||
name = "sqlparser_derive"
|
||||
description = "Procedural (proc) macros for sqlparser"
|
||||
version = "0.3.0"
|
||||
authors = ["sqlparser-rs authors"]
|
||||
homepage = "https://github.com/sqlparser-rs/sqlparser-rs"
|
||||
documentation = "https://docs.rs/sqlparser_derive/"
|
||||
keywords = ["ansi", "sql", "lexer", "parser"]
|
||||
repository = "https://github.com/sqlparser-rs/sqlparser-rs"
|
||||
license = "Apache-2.0"
|
||||
include = [
|
||||
"src/**/*.rs",
|
||||
"Cargo.toml",
|
||||
"LICENSE.TXT",
|
||||
]
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
syn = { version = "2.0", default-features = false, features = ["printing", "parsing", "derive", "proc-macro"] }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
|
@ -1 +0,0 @@
|
|||
../LICENSE.TXT
|
217
derive/README.md
217
derive/README.md
|
@ -1,217 +0,0 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# SQL Parser Derive Macro
|
||||
|
||||
## Visit
|
||||
|
||||
This crate contains a procedural macro that can automatically derive
|
||||
implementations of the `Visit` trait in the [sqlparser](https://crates.io/crates/sqlparser) crate
|
||||
|
||||
```rust
|
||||
#[derive(Visit, VisitMut)]
|
||||
struct Foo {
|
||||
boolean: bool,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Visit, VisitMut)]
|
||||
enum Bar {
|
||||
A(),
|
||||
B(String, bool),
|
||||
C { named: i32 },
|
||||
}
|
||||
```
|
||||
|
||||
Will generate code akin to
|
||||
|
||||
```rust
|
||||
impl Visit for Foo {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
self.boolean.visit(visitor)?;
|
||||
self.bar.visit(visitor)?;
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Visit for Bar {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
Self::A() => {}
|
||||
Self::B(_1, _2) => {
|
||||
_1.visit(visitor)?;
|
||||
_2.visit(visitor)?;
|
||||
}
|
||||
Self::C { named } => {
|
||||
named.visit(visitor)?;
|
||||
}
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Some types may wish to call a corresponding method on the visitor:
|
||||
|
||||
```rust
|
||||
#[derive(Visit, VisitMut)]
|
||||
#[visit(with = "visit_expr")]
|
||||
enum Expr {
|
||||
IsNull(Box<Expr>),
|
||||
..
|
||||
}
|
||||
```
|
||||
|
||||
This will result in the following sequence of visitor calls when an `IsNull`
|
||||
expression is visited
|
||||
|
||||
```
|
||||
visitor.pre_visit_expr(<is null expr>)
|
||||
visitor.pre_visit_expr(<is null operand>)
|
||||
visitor.post_visit_expr(<is null operand>)
|
||||
visitor.post_visit_expr(<is null expr>)
|
||||
```
|
||||
|
||||
For some types it is only appropriate to call a particular visitor method in
|
||||
some contexts. For example, not every `ObjectName` refers to a relation.
|
||||
|
||||
In these cases, the `visit` attribute can be used on the field for which we'd
|
||||
like to call the method:
|
||||
|
||||
```rust
|
||||
#[derive(Visit, VisitMut)]
|
||||
#[visit(with = "visit_table_factor")]
|
||||
pub enum TableFactor {
|
||||
Table {
|
||||
#[visit(with = "visit_relation")]
|
||||
name: ObjectName,
|
||||
alias: Option<TableAlias>,
|
||||
},
|
||||
..
|
||||
}
|
||||
```
|
||||
|
||||
This will generate
|
||||
|
||||
```rust
|
||||
impl Visit for TableFactor {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
visitor.pre_visit_table_factor(self)?;
|
||||
match self {
|
||||
Self::Table { name, alias } => {
|
||||
visitor.pre_visit_relation(name)?;
|
||||
name.visit(visitor)?;
|
||||
visitor.post_visit_relation(name)?;
|
||||
alias.visit(visitor)?;
|
||||
}
|
||||
}
|
||||
visitor.post_visit_table_factor(self)?;
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Note that annotating both the type and the field is incorrect as it will result
|
||||
in redundant calls to the method. For example
|
||||
|
||||
```rust
|
||||
#[derive(Visit, VisitMut)]
|
||||
#[visit(with = "visit_expr")]
|
||||
enum Expr {
|
||||
IsNull(#[visit(with = "visit_expr")] Box<Expr>),
|
||||
..
|
||||
}
|
||||
```
|
||||
|
||||
will result in these calls to the visitor
|
||||
|
||||
|
||||
```
|
||||
visitor.pre_visit_expr(<is null expr>)
|
||||
visitor.pre_visit_expr(<is null operand>)
|
||||
visitor.pre_visit_expr(<is null operand>)
|
||||
visitor.post_visit_expr(<is null operand>)
|
||||
visitor.post_visit_expr(<is null operand>)
|
||||
visitor.post_visit_expr(<is null expr>)
|
||||
```
|
||||
|
||||
If the field is a `Option` and add `#[with = "visit_xxx"]` to the field, the generated code
|
||||
will try to access the field only if it is `Some`:
|
||||
|
||||
```rust
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct ShowStatementIn {
|
||||
pub clause: ShowStatementInClause,
|
||||
pub parent_type: Option<ShowStatementInParentType>,
|
||||
#[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
|
||||
pub parent_name: Option<ObjectName>,
|
||||
}
|
||||
```
|
||||
|
||||
This will generate
|
||||
|
||||
```rust
|
||||
impl sqlparser::ast::Visit for ShowStatementIn {
|
||||
fn visit<V: sqlparser::ast::Visitor>(
|
||||
&self,
|
||||
visitor: &mut V,
|
||||
) -> ::std::ops::ControlFlow<V::Break> {
|
||||
sqlparser::ast::Visit::visit(&self.clause, visitor)?;
|
||||
sqlparser::ast::Visit::visit(&self.parent_type, visitor)?;
|
||||
if let Some(value) = &self.parent_name {
|
||||
visitor.pre_visit_relation(value)?;
|
||||
sqlparser::ast::Visit::visit(value, visitor)?;
|
||||
visitor.post_visit_relation(value)?;
|
||||
}
|
||||
::std::ops::ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl sqlparser::ast::VisitMut for ShowStatementIn {
|
||||
fn visit<V: sqlparser::ast::VisitorMut>(
|
||||
&mut self,
|
||||
visitor: &mut V,
|
||||
) -> ::std::ops::ControlFlow<V::Break> {
|
||||
sqlparser::ast::VisitMut::visit(&mut self.clause, visitor)?;
|
||||
sqlparser::ast::VisitMut::visit(&mut self.parent_type, visitor)?;
|
||||
if let Some(value) = &mut self.parent_name {
|
||||
visitor.pre_visit_relation(value)?;
|
||||
sqlparser::ast::VisitMut::visit(value, visitor)?;
|
||||
visitor.post_visit_relation(value)?;
|
||||
}
|
||||
::std::ops::ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Releasing
|
||||
|
||||
This crate's release is not automated. Instead it is released manually as needed
|
||||
|
||||
Steps:
|
||||
1. Update the version in `Cargo.toml`
|
||||
2. Update the corresponding version in `../Cargo.toml`
|
||||
3. Commit via PR
|
||||
4. Publish to crates.io:
|
||||
|
||||
```shell
|
||||
# update to latest checked in main branch and publish via
|
||||
cargo publish
|
||||
```
|
||||
|
|
@ -1,291 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use syn::spanned::Spanned;
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
parse_macro_input, parse_quote, Attribute, Data, DeriveInput, Fields, GenericParam, Generics,
|
||||
Ident, Index, LitStr, Meta, Token, Type, TypePath,
|
||||
};
|
||||
use syn::{Path, PathArguments};
|
||||
|
||||
/// Implementation of `[#derive(Visit)]`
|
||||
#[proc_macro_derive(VisitMut, attributes(visit))]
|
||||
pub fn derive_visit_mut(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_visit(
|
||||
input,
|
||||
&VisitType {
|
||||
visit_trait: quote!(VisitMut),
|
||||
visitor_trait: quote!(VisitorMut),
|
||||
modifier: Some(quote!(mut)),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Implementation of `[#derive(Visit)]`
|
||||
#[proc_macro_derive(Visit, attributes(visit))]
|
||||
pub fn derive_visit_immutable(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_visit(
|
||||
input,
|
||||
&VisitType {
|
||||
visit_trait: quote!(Visit),
|
||||
visitor_trait: quote!(Visitor),
|
||||
modifier: None,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
struct VisitType {
|
||||
visit_trait: TokenStream,
|
||||
visitor_trait: TokenStream,
|
||||
modifier: Option<TokenStream>,
|
||||
}
|
||||
|
||||
fn derive_visit(input: proc_macro::TokenStream, visit_type: &VisitType) -> proc_macro::TokenStream {
|
||||
// Parse the input tokens into a syntax tree.
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
let name = input.ident;
|
||||
|
||||
let VisitType {
|
||||
visit_trait,
|
||||
visitor_trait,
|
||||
modifier,
|
||||
} = visit_type;
|
||||
|
||||
let attributes = Attributes::parse(&input.attrs);
|
||||
// Add a bound `T: Visit` to every type parameter T.
|
||||
let generics = add_trait_bounds(input.generics, visit_type);
|
||||
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
||||
|
||||
let (pre_visit, post_visit) = attributes.visit(quote!(self));
|
||||
let children = visit_children(&input.data, visit_type);
|
||||
|
||||
let expanded = quote! {
|
||||
// The generated impl.
|
||||
// Note that it uses [`recursive::recursive`] to protect from stack overflow.
|
||||
// See tests in https://github.com/apache/datafusion-sqlparser-rs/pull/1522/ for more info.
|
||||
impl #impl_generics sqlparser::ast::#visit_trait for #name #ty_generics #where_clause {
|
||||
#[cfg_attr(feature = "recursive-protection", recursive::recursive)]
|
||||
fn visit<V: sqlparser::ast::#visitor_trait>(
|
||||
&#modifier self,
|
||||
visitor: &mut V
|
||||
) -> ::std::ops::ControlFlow<V::Break> {
|
||||
#pre_visit
|
||||
#children
|
||||
#post_visit
|
||||
::std::ops::ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
proc_macro::TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
/// Parses attributes that can be provided to this macro
|
||||
///
|
||||
/// `#[visit(leaf, with = "visit_expr")]`
|
||||
#[derive(Default)]
|
||||
struct Attributes {
|
||||
/// Content for the `with` attribute
|
||||
with: Option<Ident>,
|
||||
}
|
||||
|
||||
struct WithIdent {
|
||||
with: Option<Ident>,
|
||||
}
|
||||
impl Parse for WithIdent {
|
||||
fn parse(input: ParseStream) -> Result<Self, syn::Error> {
|
||||
let mut result = WithIdent { with: None };
|
||||
let ident = input.parse::<Ident>()?;
|
||||
if ident != "with" {
|
||||
return Err(syn::Error::new(
|
||||
ident.span(),
|
||||
"Expected identifier to be `with`",
|
||||
));
|
||||
}
|
||||
input.parse::<Token!(=)>()?;
|
||||
let s = input.parse::<LitStr>()?;
|
||||
result.with = Some(format_ident!("{}", s.value(), span = s.span()));
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
impl Attributes {
|
||||
fn parse(attrs: &[Attribute]) -> Self {
|
||||
let mut out = Self::default();
|
||||
for attr in attrs {
|
||||
if let Meta::List(ref metalist) = attr.meta {
|
||||
if metalist.path.is_ident("visit") {
|
||||
match syn::parse2::<WithIdent>(metalist.tokens.clone()) {
|
||||
Ok(with_ident) => {
|
||||
out.with = with_ident.with;
|
||||
}
|
||||
Err(e) => {
|
||||
panic!("{}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
/// Returns the pre and post visit token streams
|
||||
fn visit(&self, s: TokenStream) -> (Option<TokenStream>, Option<TokenStream>) {
|
||||
let pre_visit = self.with.as_ref().map(|m| {
|
||||
let m = format_ident!("pre_{}", m);
|
||||
quote!(visitor.#m(#s)?;)
|
||||
});
|
||||
let post_visit = self.with.as_ref().map(|m| {
|
||||
let m = format_ident!("post_{}", m);
|
||||
quote!(visitor.#m(#s)?;)
|
||||
});
|
||||
(pre_visit, post_visit)
|
||||
}
|
||||
}
|
||||
|
||||
// Add a bound `T: Visit` to every type parameter T.
|
||||
fn add_trait_bounds(mut generics: Generics, VisitType { visit_trait, .. }: &VisitType) -> Generics {
|
||||
for param in &mut generics.params {
|
||||
if let GenericParam::Type(ref mut type_param) = *param {
|
||||
type_param
|
||||
.bounds
|
||||
.push(parse_quote!(sqlparser::ast::#visit_trait));
|
||||
}
|
||||
}
|
||||
generics
|
||||
}
|
||||
|
||||
// Generate the body of the visit implementation for the given type
|
||||
fn visit_children(
|
||||
data: &Data,
|
||||
VisitType {
|
||||
visit_trait,
|
||||
modifier,
|
||||
..
|
||||
}: &VisitType,
|
||||
) -> TokenStream {
|
||||
match data {
|
||||
Data::Struct(data) => match &data.fields {
|
||||
Fields::Named(fields) => {
|
||||
let recurse = fields.named.iter().map(|f| {
|
||||
let name = &f.ident;
|
||||
let is_option = is_option(&f.ty);
|
||||
let attributes = Attributes::parse(&f.attrs);
|
||||
if is_option && attributes.with.is_some() {
|
||||
let (pre_visit, post_visit) = attributes.visit(quote!(value));
|
||||
quote_spanned!(f.span() =>
|
||||
if let Some(value) = &#modifier self.#name {
|
||||
#pre_visit sqlparser::ast::#visit_trait::visit(value, visitor)?; #post_visit
|
||||
}
|
||||
)
|
||||
} else {
|
||||
let (pre_visit, post_visit) = attributes.visit(quote!(&#modifier self.#name));
|
||||
quote_spanned!(f.span() =>
|
||||
#pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#name, visitor)?; #post_visit
|
||||
)
|
||||
}
|
||||
});
|
||||
quote! {
|
||||
#(#recurse)*
|
||||
}
|
||||
}
|
||||
Fields::Unnamed(fields) => {
|
||||
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
|
||||
let index = Index::from(i);
|
||||
let attributes = Attributes::parse(&f.attrs);
|
||||
let (pre_visit, post_visit) = attributes.visit(quote!(&self.#index));
|
||||
quote_spanned!(f.span() => #pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#index, visitor)?; #post_visit)
|
||||
});
|
||||
quote! {
|
||||
#(#recurse)*
|
||||
}
|
||||
}
|
||||
Fields::Unit => {
|
||||
quote!()
|
||||
}
|
||||
},
|
||||
Data::Enum(data) => {
|
||||
let statements = data.variants.iter().map(|v| {
|
||||
let name = &v.ident;
|
||||
match &v.fields {
|
||||
Fields::Named(fields) => {
|
||||
let names = fields.named.iter().map(|f| &f.ident);
|
||||
let visit = fields.named.iter().map(|f| {
|
||||
let name = &f.ident;
|
||||
let attributes = Attributes::parse(&f.attrs);
|
||||
let (pre_visit, post_visit) = attributes.visit(name.to_token_stream());
|
||||
quote_spanned!(f.span() => #pre_visit sqlparser::ast::#visit_trait::visit(#name, visitor)?; #post_visit)
|
||||
});
|
||||
|
||||
quote!(
|
||||
Self::#name { #(#names),* } => {
|
||||
#(#visit)*
|
||||
}
|
||||
)
|
||||
}
|
||||
Fields::Unnamed(fields) => {
|
||||
let names = fields.unnamed.iter().enumerate().map(|(i, f)| format_ident!("_{}", i, span = f.span()));
|
||||
let visit = fields.unnamed.iter().enumerate().map(|(i, f)| {
|
||||
let name = format_ident!("_{}", i);
|
||||
let attributes = Attributes::parse(&f.attrs);
|
||||
let (pre_visit, post_visit) = attributes.visit(name.to_token_stream());
|
||||
quote_spanned!(f.span() => #pre_visit sqlparser::ast::#visit_trait::visit(#name, visitor)?; #post_visit)
|
||||
});
|
||||
|
||||
quote! {
|
||||
Self::#name ( #(#names),*) => {
|
||||
#(#visit)*
|
||||
}
|
||||
}
|
||||
}
|
||||
Fields::Unit => {
|
||||
quote! {
|
||||
Self::#name => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
quote! {
|
||||
match self {
|
||||
#(#statements),*
|
||||
}
|
||||
}
|
||||
}
|
||||
Data::Union(_) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_option(ty: &Type) -> bool {
|
||||
if let Type::Path(TypePath {
|
||||
path: Path { segments, .. },
|
||||
..
|
||||
}) = ty
|
||||
{
|
||||
if let Some(segment) = segments.last() {
|
||||
if segment.ident == "Option" {
|
||||
if let PathArguments::AngleBracketed(args) = &segment.arguments {
|
||||
return args.args.len() == 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
|
@ -1,187 +0,0 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
|
||||
## Process Overview
|
||||
|
||||
As part of the Apache governance model, official releases consist of signed
|
||||
source tarballs approved by the DataFusion PMC.
|
||||
|
||||
We then use the code in the approved artifacts to release to crates.io.
|
||||
|
||||
### Change Log
|
||||
|
||||
We maintain a `CHANGELOG.md` so our users know what has been changed between releases.
|
||||
|
||||
You will need a GitHub Personal Access Token for the following steps. Follow
|
||||
[these instructions](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)
|
||||
to generate one if you do not already have one.
|
||||
|
||||
The changelog is generated using a Python script which needs `PyGitHub`, installed using pip:
|
||||
|
||||
```shell
|
||||
pip3 install PyGitHub
|
||||
```
|
||||
|
||||
To generate the changelog, set the `GITHUB_TOKEN` environment variable to a valid token and then run the script
|
||||
providing two commit ids or tags followed by the version number of the release being created. The following
|
||||
example generates a change log of all changes between the first commit and the current HEAD revision.
|
||||
|
||||
```shell
|
||||
export GITHUB_TOKEN=<your-token-here>
|
||||
python ./dev/release/generate-changelog.py v0.51.0 HEAD 0.52.0 > changelog/0.52.0.md
|
||||
```
|
||||
|
||||
This script creates a changelog from GitHub PRs based on the labels associated with them as well as looking for
|
||||
titles starting with `feat:`, `fix:`, or `docs:`.
|
||||
|
||||
Add an entry to CHANGELOG.md for the new version
|
||||
|
||||
## Prepare release commits and PR
|
||||
|
||||
### Update Version
|
||||
|
||||
Checkout the main commit to be released
|
||||
|
||||
```shell
|
||||
git fetch apache
|
||||
git checkout apache/main
|
||||
```
|
||||
|
||||
Manually update the version in the root `Cargo.toml` to the release version (e.g. `0.52.0`).
|
||||
|
||||
Lastly commit the version change:
|
||||
|
||||
```shell
|
||||
git commit -a -m 'Update version'
|
||||
```
|
||||
|
||||
## Prepare release candidate artifacts
|
||||
|
||||
After the PR gets merged, you are ready to create release artifacts from the
|
||||
merged commit.
|
||||
|
||||
(Note you need to be a committer to run these scripts as they upload to the apache svn distribution servers)
|
||||
|
||||
### Pick a Release Candidate (RC) number
|
||||
|
||||
Pick numbers in sequential order, with `0` for `rc0`, `1` for `rc1`, etc.
|
||||
|
||||
### Create git tag for the release:
|
||||
|
||||
While the official release artifacts are signed tarballs and zip files, we also
|
||||
tag the commit it was created for convenience and code archaeology.
|
||||
|
||||
Using a string such as `v0.52.0` as the `<version>`, create and push the tag by running these commands:
|
||||
|
||||
For example to tag version `0.52.0`
|
||||
|
||||
```shell
|
||||
git fetch apache
|
||||
git tag v0.52.0-rc1 apache/main
|
||||
# push tag to Github remote
|
||||
git push apache v0.52.0-rc1
|
||||
```
|
||||
|
||||
### Create, sign, and upload artifacts
|
||||
|
||||
Run `create-tarball.sh` with the `<version>` tag and `<rc>` and you found in previous steps:
|
||||
|
||||
```shell
|
||||
GITHUB_TOKEN=<TOKEN> ./dev/release/create-tarball.sh 0.52.0 1
|
||||
```
|
||||
|
||||
The `create-tarball.sh` script
|
||||
|
||||
1. creates and uploads all release candidate artifacts to the [datafusion
|
||||
dev](https://dist.apache.org/repos/dist/dev/datafusion) location on the
|
||||
apache distribution svn server
|
||||
|
||||
2. provide you an email template to
|
||||
send to dev@datafusion.apache.org for release voting.
|
||||
|
||||
### Vote on Release Candidate artifacts
|
||||
|
||||
Send the email output from the script to dev@datafusion.apache.org.
|
||||
|
||||
For the release to become "official" it needs at least three PMC members to vote +1 on it.
|
||||
|
||||
### Verifying Release Candidates
|
||||
|
||||
The `dev/release/verify-release-candidate.sh` is a script in this repository that can assist in the verification process. Run it like:
|
||||
|
||||
```shell
|
||||
./dev/release/verify-release-candidate.sh 0.52.0 1
|
||||
```
|
||||
|
||||
#### If the release is not approved
|
||||
|
||||
If the release is not approved, fix whatever the problem is, merge changelog
|
||||
changes into main if there is any and try again with the next RC number.
|
||||
|
||||
## Finalize the release
|
||||
|
||||
NOTE: steps in this section can only be done by PMC members.
|
||||
|
||||
### After the release is approved
|
||||
|
||||
Move artifacts to the release location in SVN, using the `release-tarball.sh` script:
|
||||
|
||||
```shell
|
||||
./dev/release/release-tarball.sh 0.52.0 1
|
||||
```
|
||||
|
||||
Promote the rc tag to the release tag
|
||||
```shell
|
||||
git tag v0.52.0 v0.52.0-rc3
|
||||
git push apache v0.52.0
|
||||
```
|
||||
|
||||
Congratulations! The release is now official!
|
||||
|
||||
### Publish on Crates.io
|
||||
|
||||
Only approved releases of the tarball should be published to
|
||||
crates.io, in order to conform to Apache Software Foundation
|
||||
governance standards.
|
||||
|
||||
A DataFusion committer can publish this crate after an official project release has
|
||||
been made to crates.io using the following instructions.
|
||||
|
||||
Follow [these
|
||||
instructions](https://doc.rust-lang.org/cargo/reference/publishing.html) to
|
||||
create an account and login to crates.io before asking to be added as an owner
|
||||
to the sqlparser DataFusion crates.
|
||||
|
||||
Download and unpack the official release tarball
|
||||
|
||||
Verify that the Cargo.toml in the tarball contains the correct version
|
||||
(e.g. `version = "0.52.0"`) and then publish the crates by running the following commands
|
||||
|
||||
```shell
|
||||
cargo publish
|
||||
```
|
||||
|
||||
If necessary, also publish the `sqlparser_derive` crate:
|
||||
|
||||
crates.io homepage: https://crates.io/crates/sqlparser_derive
|
||||
|
||||
```shell
|
||||
(cd derive && cargo publish
|
||||
```
|
|
@ -1,59 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
##############################################################################
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
##############################################################################
|
||||
import fnmatch
|
||||
import re
|
||||
import sys
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
if len(sys.argv) != 3:
|
||||
sys.stderr.write("Usage: %s exclude_globs.lst rat_report.xml\n" %
|
||||
sys.argv[0])
|
||||
sys.exit(1)
|
||||
|
||||
exclude_globs_filename = sys.argv[1]
|
||||
xml_filename = sys.argv[2]
|
||||
|
||||
globs = [line.strip() for line in open(exclude_globs_filename, "r")]
|
||||
|
||||
tree = ET.parse(xml_filename)
|
||||
root = tree.getroot()
|
||||
resources = root.findall('resource')
|
||||
|
||||
all_ok = True
|
||||
for r in resources:
|
||||
approvals = r.findall('license-approval')
|
||||
if not approvals or approvals[0].attrib['name'] == 'true':
|
||||
continue
|
||||
clean_name = re.sub('^[^/]+/', '', r.attrib['name'])
|
||||
excluded = False
|
||||
for g in globs:
|
||||
if fnmatch.fnmatch(clean_name, g):
|
||||
excluded = True
|
||||
break
|
||||
if not excluded:
|
||||
sys.stdout.write("NOT APPROVED: %s (%s): %s\n" % (
|
||||
clean_name, r.attrib['name'], approvals[0].attrib['name']))
|
||||
all_ok = False
|
||||
|
||||
if not all_ok:
|
||||
sys.exit(1)
|
||||
|
||||
print('OK')
|
||||
sys.exit(0)
|
|
@ -1,135 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
# Adapted from https://github.com/apache/datafusion/tree/master/dev/release/create-tarball.sh
|
||||
|
||||
# This script creates a signed tarball in
|
||||
# dev/dist/apache-datafusion-sqlparser-rs-<version>-rc.tar.gz and uploads it to
|
||||
# the "dev" area of the dist.apache.datafusion repository and prepares an
|
||||
# email for sending to the dev@datafusion.apache.org list for a formal
|
||||
# vote.
|
||||
#
|
||||
# See release/README.md for full release instructions
|
||||
#
|
||||
# Requirements:
|
||||
#
|
||||
# 1. gpg setup for signing and have uploaded your public
|
||||
# signature to https://pgp.mit.edu/
|
||||
#
|
||||
# 2. Logged into the apache svn server with the appropriate
|
||||
# credentials
|
||||
#
|
||||
# 3. Install the requests python package
|
||||
#
|
||||
#
|
||||
# Based in part on 02-source.sh from apache/arrow
|
||||
#
|
||||
|
||||
set -e
|
||||
|
||||
SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
SOURCE_TOP_DIR="$(cd "${SOURCE_DIR}/../../" && pwd)"
|
||||
|
||||
if [ "$#" -ne 2 ]; then
|
||||
echo "Usage: $0 <version> <rc>"
|
||||
echo "ex. $0 0.52.0 2"
|
||||
exit
|
||||
fi
|
||||
|
||||
if [[ -z "${GITHUB_TOKEN}" ]]; then
|
||||
echo "Please set personal github token through GITHUB_TOKEN environment variable"
|
||||
exit
|
||||
fi
|
||||
|
||||
version=$1
|
||||
rc=$2
|
||||
tag="v${version}-rc${rc}"
|
||||
|
||||
echo "Attempting to create ${tarball} from tag ${tag}"
|
||||
release_hash=$(cd "${SOURCE_TOP_DIR}" && git rev-list --max-count=1 ${tag})
|
||||
|
||||
release=apache-datafusion-sqlparser-rs-${version}
|
||||
distdir=${SOURCE_TOP_DIR}/dev/dist/${release}-rc${rc}
|
||||
tarname=${release}.tar.gz
|
||||
tarball=${distdir}/${tarname}
|
||||
url="https://dist.apache.org/repos/dist/dev/datafusion/${release}-rc${rc}"
|
||||
|
||||
if [ -z "$release_hash" ]; then
|
||||
echo "Cannot continue: unknown git tag: ${tag}"
|
||||
fi
|
||||
|
||||
echo "Draft email for dev@datafusion.apache.org mailing list"
|
||||
echo ""
|
||||
echo "---------------------------------------------------------"
|
||||
cat <<MAIL
|
||||
To: dev@datafusion.apache.org
|
||||
Subject: [VOTE] Release Apache DataFusion sqlparser-rs ${version} RC${rc}
|
||||
Hi,
|
||||
|
||||
I would like to propose a release of Apache DataFusion sqlparser-rs version ${version}.
|
||||
|
||||
This release candidate is based on commit: ${release_hash} [1]
|
||||
The proposed release tarball and signatures are hosted at [2].
|
||||
The changelog is located at [3].
|
||||
|
||||
Please download, verify checksums and signatures, run the unit tests, and vote
|
||||
on the release. The vote will be open for at least 72 hours.
|
||||
|
||||
Only votes from PMC members are binding, but all members of the community are
|
||||
encouraged to test the release and vote with "(non-binding)".
|
||||
|
||||
The standard verification procedure is documented at https://github.com/apache/datafusion-sqlparser-rs/blob/main/dev/release/README.md#verifying-release-candidates.
|
||||
|
||||
[ ] +1 Release this as Apache DataFusion sqlparser-rs ${version}
|
||||
[ ] +0
|
||||
[ ] -1 Do not release this as Apache DataFusion sqlparser-rs ${version} because...
|
||||
|
||||
Here is my vote:
|
||||
|
||||
+1
|
||||
|
||||
[1]: https://github.com/apache/datafusion-sqlparser-rs/tree/${release_hash}
|
||||
[2]: ${url}
|
||||
[3]: https://github.com/apache/datafusion-sqlparser-rs/blob/${release_hash}/CHANGELOG.md
|
||||
MAIL
|
||||
echo "---------------------------------------------------------"
|
||||
|
||||
|
||||
# create <tarball> containing the files in git at $release_hash
|
||||
# the files in the tarball are prefixed with {version} (e.g. 4.0.1)
|
||||
mkdir -p ${distdir}
|
||||
(cd "${SOURCE_TOP_DIR}" && git archive ${release_hash} --prefix ${release}/ | gzip > ${tarball})
|
||||
|
||||
echo "Running rat license checker on ${tarball}"
|
||||
${SOURCE_DIR}/run-rat.sh ${tarball}
|
||||
|
||||
echo "Signing tarball and creating checksums"
|
||||
gpg --armor --output ${tarball}.asc --detach-sig ${tarball}
|
||||
# create signing with relative path of tarball
|
||||
# so that they can be verified with a command such as
|
||||
# shasum --check apache-datafusion-sqlparser-rs-0.52.0-rc1.tar.gz.sha512
|
||||
(cd ${distdir} && shasum -a 256 ${tarname}) > ${tarball}.sha256
|
||||
(cd ${distdir} && shasum -a 512 ${tarname}) > ${tarball}.sha512
|
||||
|
||||
|
||||
echo "Uploading to sqlparser-rs dist/dev to ${url}"
|
||||
svn co --depth=empty https://dist.apache.org/repos/dist/dev/datafusion ${SOURCE_TOP_DIR}/dev/dist
|
||||
svn add ${distdir}
|
||||
svn ci -m "Apache DataFusion ${version} ${rc}" ${distdir}
|
|
@ -1,165 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from github import Github
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
def print_pulls(repo_name, title, pulls):
|
||||
if len(pulls) > 0:
|
||||
print("**{}:**".format(title))
|
||||
print()
|
||||
for (pull, commit) in pulls:
|
||||
url = "https://github.com/{}/pull/{}".format(repo_name, pull.number)
|
||||
author = f"({commit.author.login})" if commit.author else ''
|
||||
print("- {} [#{}]({}) {}".format(pull.title, pull.number, url, author))
|
||||
print()
|
||||
|
||||
|
||||
def generate_changelog(repo, repo_name, tag1, tag2, version):
|
||||
|
||||
# get a list of commits between two tags
|
||||
print(f"Fetching list of commits between {tag1} and {tag2}", file=sys.stderr)
|
||||
comparison = repo.compare(tag1, tag2)
|
||||
|
||||
# get the pull requests for these commits
|
||||
print("Fetching pull requests", file=sys.stderr)
|
||||
unique_pulls = []
|
||||
all_pulls = []
|
||||
for commit in comparison.commits:
|
||||
pulls = commit.get_pulls()
|
||||
for pull in pulls:
|
||||
# there can be multiple commits per PR if squash merge is not being used and
|
||||
# in this case we should get all the author names, but for now just pick one
|
||||
if pull.number not in unique_pulls:
|
||||
unique_pulls.append(pull.number)
|
||||
all_pulls.append((pull, commit))
|
||||
|
||||
# we split the pulls into categories
|
||||
breaking = []
|
||||
bugs = []
|
||||
docs = []
|
||||
enhancements = []
|
||||
performance = []
|
||||
other = []
|
||||
|
||||
# categorize the pull requests based on GitHub labels
|
||||
print("Categorizing pull requests", file=sys.stderr)
|
||||
for (pull, commit) in all_pulls:
|
||||
|
||||
# see if PR title uses Conventional Commits
|
||||
cc_type = ''
|
||||
cc_scope = ''
|
||||
cc_breaking = ''
|
||||
parts = re.findall(r'^([a-z]+)(\([a-z]+\))?(!)?:', pull.title)
|
||||
if len(parts) == 1:
|
||||
parts_tuple = parts[0]
|
||||
cc_type = parts_tuple[0] # fix, feat, docs, chore
|
||||
cc_scope = parts_tuple[1] # component within project
|
||||
cc_breaking = parts_tuple[2] == '!'
|
||||
|
||||
labels = [label.name for label in pull.labels]
|
||||
if 'api change' in labels or cc_breaking:
|
||||
breaking.append((pull, commit))
|
||||
elif 'bug' in labels or cc_type == 'fix':
|
||||
bugs.append((pull, commit))
|
||||
elif 'performance' in labels or cc_type == 'perf':
|
||||
performance.append((pull, commit))
|
||||
elif 'enhancement' in labels or cc_type == 'feat':
|
||||
enhancements.append((pull, commit))
|
||||
elif 'documentation' in labels or cc_type == 'docs' or cc_type == 'doc':
|
||||
docs.append((pull, commit))
|
||||
else:
|
||||
other.append((pull, commit))
|
||||
|
||||
# produce the changelog content
|
||||
print("Generating changelog content", file=sys.stderr)
|
||||
|
||||
# ASF header
|
||||
print("""<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->\n""")
|
||||
|
||||
print(f"# sqlparser-rs {version} Changelog\n")
|
||||
|
||||
# get the number of commits
|
||||
commit_count = subprocess.check_output(f"git log --pretty=oneline {tag1}..{tag2} | wc -l", shell=True, text=True).strip()
|
||||
|
||||
# get number of contributors
|
||||
contributor_count = subprocess.check_output(f"git shortlog -sn {tag1}..{tag2} | wc -l", shell=True, text=True).strip()
|
||||
|
||||
print(f"This release consists of {commit_count} commits from {contributor_count} contributors. "
|
||||
f"See credits at the end of this changelog for more information.\n")
|
||||
|
||||
print_pulls(repo_name, "Breaking changes", breaking)
|
||||
print_pulls(repo_name, "Performance related", performance)
|
||||
print_pulls(repo_name, "Implemented enhancements", enhancements)
|
||||
print_pulls(repo_name, "Fixed bugs", bugs)
|
||||
print_pulls(repo_name, "Documentation updates", docs)
|
||||
print_pulls(repo_name, "Other", other)
|
||||
|
||||
# show code contributions
|
||||
credits = subprocess.check_output(f"git shortlog -sn {tag1}..{tag2}", shell=True, text=True).rstrip()
|
||||
|
||||
print("## Credits\n")
|
||||
print("Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) "
|
||||
"per contributor.\n")
|
||||
print("```")
|
||||
print(credits)
|
||||
print("```\n")
|
||||
|
||||
print("Thank you also to everyone who contributed in other ways such as filing issues, reviewing "
|
||||
"PRs, and providing feedback on this release.\n")
|
||||
|
||||
def cli(args=None):
|
||||
"""Process command line arguments."""
|
||||
if not args:
|
||||
args = sys.argv[1:]
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("tag1", help="The previous commit or tag (e.g. 0.1.0)")
|
||||
parser.add_argument("tag2", help="The current commit or tag (e.g. HEAD)")
|
||||
parser.add_argument("version", help="The version number to include in the changelog")
|
||||
args = parser.parse_args()
|
||||
|
||||
token = os.getenv("GITHUB_TOKEN")
|
||||
project = "apache/datafusion-sqlparser-rs"
|
||||
|
||||
g = Github(token)
|
||||
repo = g.get_repo(project)
|
||||
generate_changelog(repo, project, args.tag1, args.tag2, args.version)
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
|
@ -1,8 +0,0 @@
|
|||
.tool-versions
|
||||
target/*
|
||||
**.gitignore
|
||||
rat.txt
|
||||
dev/release/rat_exclude_files.txt
|
||||
sqlparser_bench/img/flamegraph.svg
|
||||
**Cargo.lock
|
||||
filtered_rat.txt
|
|
@ -1,74 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
# Adapted from https://github.com/apache/arrow-rs/tree/master/dev/release/release-tarball.sh
|
||||
|
||||
# This script copies a tarball from the "dev" area of the
|
||||
# dist.apache.datafusion repository to the "release" area
|
||||
#
|
||||
# This script should only be run after the release has been approved
|
||||
# by the Apache DataFusion PMC committee.
|
||||
#
|
||||
# See release/README.md for full release instructions
|
||||
#
|
||||
# Based in part on post-01-upload.sh from apache/arrow
|
||||
|
||||
|
||||
set -e
|
||||
set -u
|
||||
|
||||
if [ "$#" -ne 2 ]; then
|
||||
echo "Usage: $0 <version> <rc-num>"
|
||||
echo "ex. $0 0.52.0 2"
|
||||
exit
|
||||
fi
|
||||
|
||||
version=$1
|
||||
rc=$2
|
||||
|
||||
tmp_dir=tmp-apache-datafusion-dist
|
||||
|
||||
echo "Recreate temporary directory: ${tmp_dir}"
|
||||
rm -rf ${tmp_dir}
|
||||
mkdir -p ${tmp_dir}
|
||||
|
||||
echo "Clone dev dist repository"
|
||||
svn \
|
||||
co \
|
||||
https://dist.apache.org/repos/dist/dev/datafusion/apache-datafusion-sqlparser-rs-${version}-rc${rc} \
|
||||
${tmp_dir}/dev
|
||||
|
||||
echo "Clone release dist repository"
|
||||
svn co https://dist.apache.org/repos/dist/release/datafusion ${tmp_dir}/release
|
||||
|
||||
echo "Copy ${version}-rc${rc} to release working copy"
|
||||
release_version=datafusion-sqlparser-rs-${version}
|
||||
mkdir -p ${tmp_dir}/release/${release_version}
|
||||
cp -r ${tmp_dir}/dev/* ${tmp_dir}/release/${release_version}/
|
||||
svn add ${tmp_dir}/release/${release_version}
|
||||
|
||||
echo "Commit release"
|
||||
svn ci -m "Apache DataFusion sqlparser-rs ${version}" ${tmp_dir}/release
|
||||
|
||||
echo "Clean up"
|
||||
rm -rf ${tmp_dir}
|
||||
|
||||
echo "Success! The release is available here:"
|
||||
echo " https://dist.apache.org/repos/dist/release/datafusion/${release_version}"
|
|
@ -1,43 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
RAT_VERSION=0.13
|
||||
|
||||
# download apache rat
|
||||
if [ ! -f apache-rat-${RAT_VERSION}.jar ]; then
|
||||
curl -s https://repo1.maven.org/maven2/org/apache/rat/apache-rat/${RAT_VERSION}/apache-rat-${RAT_VERSION}.jar > apache-rat-${RAT_VERSION}.jar
|
||||
fi
|
||||
|
||||
RAT="java -jar apache-rat-${RAT_VERSION}.jar -x "
|
||||
|
||||
RELEASE_DIR=$(cd "$(dirname "$BASH_SOURCE")"; pwd)
|
||||
|
||||
# generate the rat report
|
||||
$RAT $1 > rat.txt
|
||||
python $RELEASE_DIR/check-rat-report.py $RELEASE_DIR/rat_exclude_files.txt rat.txt > filtered_rat.txt
|
||||
cat filtered_rat.txt
|
||||
UNAPPROVED=`cat filtered_rat.txt | grep "NOT APPROVED" | wc -l`
|
||||
|
||||
if [ "0" -eq "${UNAPPROVED}" ]; then
|
||||
echo "No unapproved licenses"
|
||||
else
|
||||
echo "${UNAPPROVED} unapproved licences. Check rat report: rat.txt"
|
||||
exit 1
|
||||
fi
|
|
@ -1,152 +0,0 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
case $# in
|
||||
2) VERSION="$1"
|
||||
RC_NUMBER="$2"
|
||||
;;
|
||||
*) echo "Usage: $0 X.Y.Z RC_NUMBER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
set -e
|
||||
set -x
|
||||
set -o pipefail
|
||||
|
||||
SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd)"
|
||||
ARROW_DIR="$(dirname $(dirname ${SOURCE_DIR}))"
|
||||
ARROW_DIST_URL='https://dist.apache.org/repos/dist/dev/datafusion'
|
||||
|
||||
download_dist_file() {
|
||||
curl \
|
||||
--silent \
|
||||
--show-error \
|
||||
--fail \
|
||||
--location \
|
||||
--remote-name $ARROW_DIST_URL/$1
|
||||
}
|
||||
|
||||
download_rc_file() {
|
||||
download_dist_file apache-datafusion-sqlparser-rs-${VERSION}-rc${RC_NUMBER}/$1
|
||||
}
|
||||
|
||||
import_gpg_keys() {
|
||||
download_dist_file KEYS
|
||||
gpg --import KEYS
|
||||
}
|
||||
|
||||
if type shasum >/dev/null 2>&1; then
|
||||
sha256_verify="shasum -a 256 -c"
|
||||
sha512_verify="shasum -a 512 -c"
|
||||
else
|
||||
sha256_verify="sha256sum -c"
|
||||
sha512_verify="sha512sum -c"
|
||||
fi
|
||||
|
||||
fetch_archive() {
|
||||
local dist_name=$1
|
||||
download_rc_file ${dist_name}.tar.gz
|
||||
download_rc_file ${dist_name}.tar.gz.asc
|
||||
download_rc_file ${dist_name}.tar.gz.sha256
|
||||
download_rc_file ${dist_name}.tar.gz.sha512
|
||||
verify_dir_artifact_signatures
|
||||
}
|
||||
|
||||
verify_dir_artifact_signatures() {
|
||||
# verify the signature and the checksums of each artifact
|
||||
find . -name '*.asc' | while read sigfile; do
|
||||
artifact=${sigfile/.asc/}
|
||||
gpg --verify $sigfile $artifact || exit 1
|
||||
|
||||
# go into the directory because the checksum files contain only the
|
||||
# basename of the artifact
|
||||
pushd $(dirname $artifact)
|
||||
base_artifact=$(basename $artifact)
|
||||
${sha256_verify} $base_artifact.sha256 || exit 1
|
||||
${sha512_verify} $base_artifact.sha512 || exit 1
|
||||
popd
|
||||
done
|
||||
}
|
||||
|
||||
setup_tempdir() {
|
||||
cleanup() {
|
||||
if [ "${TEST_SUCCESS}" = "yes" ]; then
|
||||
rm -fr "${ARROW_TMPDIR}"
|
||||
else
|
||||
echo "Failed to verify release candidate. See ${ARROW_TMPDIR} for details."
|
||||
fi
|
||||
}
|
||||
|
||||
if [ -z "${ARROW_TMPDIR}" ]; then
|
||||
# clean up automatically if ARROW_TMPDIR is not defined
|
||||
ARROW_TMPDIR=$(mktemp -d -t "$1.XXXXX")
|
||||
trap cleanup EXIT
|
||||
else
|
||||
# don't clean up automatically
|
||||
mkdir -p "${ARROW_TMPDIR}"
|
||||
fi
|
||||
}
|
||||
|
||||
test_source_distribution() {
|
||||
# install rust toolchain in a similar fashion like test-miniconda
|
||||
export RUSTUP_HOME=$PWD/test-rustup
|
||||
export CARGO_HOME=$PWD/test-rustup
|
||||
|
||||
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path
|
||||
|
||||
export PATH=$RUSTUP_HOME/bin:$PATH
|
||||
source $RUSTUP_HOME/env
|
||||
|
||||
# build and test rust
|
||||
|
||||
# raises on any formatting errors
|
||||
rustup component add rustfmt --toolchain stable
|
||||
cargo fmt --all -- --check
|
||||
|
||||
cargo build
|
||||
cargo test --all-features
|
||||
|
||||
if ( find -iname 'Cargo.toml' | xargs grep SNAPSHOT ); then
|
||||
echo "Cargo.toml version should not contain SNAPSHOT for releases"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check that publish works
|
||||
cargo publish --dry-run
|
||||
}
|
||||
|
||||
TEST_SUCCESS=no
|
||||
|
||||
setup_tempdir "datafusion-sqlparser-rs-${VERSION}"
|
||||
echo "Working in sandbox ${ARROW_TMPDIR}"
|
||||
cd ${ARROW_TMPDIR}
|
||||
|
||||
dist_name="apache-datafusion-sqlparser-rs-${VERSION}"
|
||||
import_gpg_keys
|
||||
fetch_archive ${dist_name}
|
||||
tar xf ${dist_name}.tar.gz
|
||||
pushd ${dist_name}
|
||||
test_source_distribution
|
||||
popd
|
||||
|
||||
TEST_SUCCESS=yes
|
||||
echo 'Release candidate looks good!'
|
||||
exit 0
|
|
@ -1,22 +1,3 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Benchmarking
|
||||
|
||||
Run `cargo bench` in the project `sqlparser_bench` execute the queries.
|
||||
|
|
|
@ -1,29 +1,10 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Writing a Custom SQL Parser
|
||||
|
||||
I have explored many different ways of building this library to make it easy to extend it for custom SQL dialects. Most of my attempts ended in failure but I have now found a workable solution. It is not without downsides but this seems to be the most pragmatic solution.
|
||||
|
||||
The concept is simply to write a new parser that delegates to the ANSI parser so that as much as possible of the core functionality can be re-used.
|
||||
|
||||
I also plan on building in specific support for custom data types, where a lambda function can be passed to the parser to parse data types.
|
||||
I also plan on building in specific support for custom data types, where a lambda function can be parsed to the parser to parse data types.
|
||||
|
||||
For an example of this, see the [DataFusion](https://github.com/apache/arrow-datafusion) project and its [query planner](https://github.com/apache/arrow-datafusion/tree/master/datafusion/sql).
|
||||
For an example of this, see the [DataFusion](https://github.com/datafusion-rs/datafusion) project.
|
||||
|
||||
|
|
|
@ -1,22 +1,3 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
# Fuzzing
|
||||
|
||||
## Installing `honggfuzz`
|
||||
|
|
58
docs/releasing.md
Normal file
58
docs/releasing.md
Normal file
|
@ -0,0 +1,58 @@
|
|||
# Releasing
|
||||
|
||||
## Prerequisites
|
||||
Publishing to crates.io has been automated via GitHub Actions, so you will only
|
||||
need push access to the [sqlparser-rs GitHub repository](https://github.com/sqlparser-rs/sqlparser-rs)
|
||||
in order to publish a release.
|
||||
|
||||
We use the [`cargo release`](https://github.com/sunng87/cargo-release)
|
||||
subcommand to ensure correct versioning. Install via:
|
||||
|
||||
```
|
||||
$ cargo install cargo-release
|
||||
```
|
||||
|
||||
## Process
|
||||
|
||||
1. **Before releasing** ensure `CHANGELOG.md` is updated appropriately and that
|
||||
you have a clean checkout of the `main` branch of the sqlparser repository:
|
||||
```
|
||||
$ git fetch && git status
|
||||
On branch main
|
||||
Your branch is up to date with 'upstream/main'.
|
||||
|
||||
nothing to commit, working tree clean
|
||||
```
|
||||
* If you have the time, check that the examples in the README are up to date.
|
||||
|
||||
2. Using `cargo-release` we can publish a new release like so:
|
||||
|
||||
```
|
||||
$ cargo release minor --push-remote upstream
|
||||
```
|
||||
|
||||
After verifying, you can rerun with `--execute` if all looks good.
|
||||
You can add `--no-push` to stop before actually publishing the release.
|
||||
|
||||
`cargo release` will then:
|
||||
|
||||
* Bump the minor part of the version in `Cargo.toml` (e.g. `0.7.1-alpha.0`
|
||||
-> `0.8.0`. You can use `patch` instead of `minor`, as appropriate).
|
||||
* Create a new tag (e.g. `v0.8.0`) locally
|
||||
* Push the new tag to the specified remote (`upstream` in the above
|
||||
example), which will trigger a publishing process to crates.io as part of
|
||||
the [corresponding GitHub Action](https://github.com/sqlparser-rs/sqlparser-rs/blob/main/.github/workflows/rust.yml).
|
||||
|
||||
Note that credentials for authoring in this way are securely stored in
|
||||
the (GitHub) repo secrets as `CRATE_TOKEN`.
|
||||
* Bump the crate version again (to something like `0.8.1-alpha.0`) to
|
||||
indicate the start of new development cycle.
|
||||
|
||||
3. Push the updates to the `main` branch upstream:
|
||||
```
|
||||
$ git push upstream
|
||||
```
|
||||
|
||||
4. Check that the new version of the crate is available on crates.io:
|
||||
https://crates.io/crates/sqlparser
|
||||
|
|
@ -1,27 +1,20 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#![warn(clippy::all)]
|
||||
|
||||
//! A small command-line app to run the parser.
|
||||
//! Run with `cargo run --example cli`
|
||||
|
||||
/// A small command-line app to run the parser.
|
||||
/// Run with `cargo run --example cli`
|
||||
use std::fs;
|
||||
use std::io::{stdin, Read};
|
||||
|
||||
use simple_logger::SimpleLogger;
|
||||
use sqlparser::dialect::*;
|
||||
|
@ -40,9 +33,6 @@ $ cargo run --example cli FILENAME.sql [--dialectname]
|
|||
To print the parse results as JSON:
|
||||
$ cargo run --feature json_example --example cli FILENAME.sql [--dialectname]
|
||||
|
||||
To read from stdin instead of a file:
|
||||
$ cargo run --example cli - [--dialectname]
|
||||
|
||||
"#,
|
||||
);
|
||||
|
||||
|
@ -55,25 +45,13 @@ $ cargo run --example cli - [--dialectname]
|
|||
"--snowflake" => Box::new(SnowflakeDialect {}),
|
||||
"--hive" => Box::new(HiveDialect {}),
|
||||
"--redshift" => Box::new(RedshiftSqlDialect {}),
|
||||
"--clickhouse" => Box::new(ClickHouseDialect {}),
|
||||
"--duckdb" => Box::new(DuckDbDialect {}),
|
||||
"--sqlite" => Box::new(SQLiteDialect {}),
|
||||
"--generic" | "" => Box::new(GenericDialect {}),
|
||||
s => panic!("Unexpected parameter: {s}"),
|
||||
s => panic!("Unexpected parameter: {}", s),
|
||||
};
|
||||
|
||||
let contents = if filename == "-" {
|
||||
println!("Parsing from stdin using {dialect:?}");
|
||||
let mut buf = Vec::new();
|
||||
stdin()
|
||||
.read_to_end(&mut buf)
|
||||
.expect("failed to read from stdin");
|
||||
String::from_utf8(buf).expect("stdin content wasn't valid utf8")
|
||||
} else {
|
||||
println!("Parsing from file '{}' using {:?}", &filename, dialect);
|
||||
fs::read_to_string(&filename)
|
||||
.unwrap_or_else(|_| panic!("Unable to read the file {}", &filename))
|
||||
};
|
||||
println!("Parsing from file '{}' using {:?}", &filename, dialect);
|
||||
let contents = fs::read_to_string(&filename)
|
||||
.unwrap_or_else(|_| panic!("Unable to read the file {}", &filename));
|
||||
let without_bom = if contents.chars().next().unwrap() as u64 != 0xfeff {
|
||||
contents.as_str()
|
||||
} else {
|
||||
|
@ -97,16 +75,16 @@ $ cargo run --example cli - [--dialectname]
|
|||
#[cfg(feature = "json_example")]
|
||||
{
|
||||
let serialized = serde_json::to_string_pretty(&statements).unwrap();
|
||||
println!("Serialized as JSON:\n{serialized}");
|
||||
println!("Serialized as JSON:\n{}", serialized);
|
||||
}
|
||||
} else {
|
||||
println!("Parse results:\n{statements:#?}");
|
||||
println!("Parse results:\n{:#?}", statements);
|
||||
}
|
||||
|
||||
std::process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Error during parsing: {e:?}");
|
||||
println!("Error during parsing: {:?}", e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,19 +1,14 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#![warn(clippy::all)]
|
||||
|
||||
|
@ -30,5 +25,5 @@ fn main() {
|
|||
|
||||
let ast = Parser::parse_sql(&dialect, sql).unwrap();
|
||||
|
||||
println!("AST: {ast:?}");
|
||||
println!("AST: {:?}", ast);
|
||||
}
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[package]
|
||||
name = "fuzz"
|
||||
version = "0.1.0"
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use honggfuzz::fuzz;
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
|
|
1
rust-toolchain
Normal file
1
rust-toolchain
Normal file
|
@ -0,0 +1 @@
|
|||
stable
|
17
rustfmt.toml
17
rustfmt.toml
|
@ -1,18 +1 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# We use rustfmt's default settings to format the source code
|
|
@ -1,23 +1,5 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
[package]
|
||||
name = "sqlparser_bench"
|
||||
description = "Benchmarks for sqlparser"
|
||||
version = "0.1.0"
|
||||
authors = ["Dandandan <danielheres@gmail.com>"]
|
||||
edition = "2018"
|
||||
|
@ -26,7 +8,7 @@ edition = "2018"
|
|||
sqlparser = { path = "../" }
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = "0.6"
|
||||
criterion = "0.4"
|
||||
|
||||
[[bench]]
|
||||
name = "sqlparser_bench"
|
||||
|
|
|
@ -1,42 +0,0 @@
|
|||
<!---
|
||||
Licensed to the Apache Software Foundation (ASF) under one
|
||||
or more contributor license agreements. See the NOTICE file
|
||||
distributed with this work for additional information
|
||||
regarding copyright ownership. The ASF licenses this file
|
||||
to you under the Apache License, Version 2.0 (the
|
||||
"License"); you may not use this file except in compliance
|
||||
with the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing,
|
||||
software distributed under the License is distributed on an
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, either express or implied. See the License for the
|
||||
specific language governing permissions and limitations
|
||||
under the License.
|
||||
-->
|
||||
|
||||
Benchmarks for sqlparser. See [the main README](../README.md) for more information.
|
||||
|
||||
Note: this is in a separate, non workspace crate to avoid adding a dependency
|
||||
on `criterion` to the main crate (which complicates testing without std).
|
||||
|
||||
# Running Benchmarks
|
||||
|
||||
```shell
|
||||
cargo bench --bench sqlparser_bench
|
||||
```
|
||||
|
||||
# Profiling
|
||||
|
||||
Note you can generate a [flamegraph] using the following command:
|
||||
|
||||
```shell
|
||||
cargo flamegraph --bench sqlparser_bench
|
||||
```
|
||||
|
||||
[flamegraph]: https://crates.io/crates/flamegraph
|
||||
|
||||
Here is an example flamegraph:
|
||||

|
|
@ -1,19 +1,14 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
|
@ -23,9 +18,9 @@ fn basic_queries(c: &mut Criterion) {
|
|||
let mut group = c.benchmark_group("sqlparser-rs parsing benchmark");
|
||||
let dialect = GenericDialect {};
|
||||
|
||||
let string = "SELECT * FROM my_table WHERE 1 = 1";
|
||||
let string = "SELECT * FROM table WHERE 1 = 1";
|
||||
group.bench_function("sqlparser::select", |b| {
|
||||
b.iter(|| Parser::parse_sql(&dialect, string).unwrap());
|
||||
b.iter(|| Parser::parse_sql(&dialect, string));
|
||||
});
|
||||
|
||||
let with_query = "
|
||||
|
@ -33,52 +28,14 @@ fn basic_queries(c: &mut Criterion) {
|
|||
SELECT MAX(a) AS max_a,
|
||||
COUNT(b) AS b_num,
|
||||
user_id
|
||||
FROM MY_TABLE
|
||||
FROM TABLE
|
||||
GROUP BY user_id
|
||||
)
|
||||
SELECT * FROM my_table
|
||||
SELECT * FROM table
|
||||
LEFT JOIN derived USING (user_id)
|
||||
";
|
||||
group.bench_function("sqlparser::with_select", |b| {
|
||||
b.iter(|| Parser::parse_sql(&dialect, with_query).unwrap());
|
||||
});
|
||||
|
||||
let large_statement = {
|
||||
let expressions = (0..1000)
|
||||
.map(|n| format!("FN_{n}(COL_{n})"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
let tables = (0..1000)
|
||||
.map(|n| format!("TABLE_{n}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" JOIN ");
|
||||
let where_condition = (0..1000)
|
||||
.map(|n| format!("COL_{n} = {n}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(" OR ");
|
||||
let order_condition = (0..1000)
|
||||
.map(|n| format!("COL_{n} DESC"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
format!(
|
||||
"SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
|
||||
)
|
||||
};
|
||||
|
||||
group.bench_function("parse_large_statement", |b| {
|
||||
b.iter(|| Parser::parse_sql(&dialect, std::hint::black_box(large_statement.as_str())));
|
||||
});
|
||||
|
||||
let large_statement = Parser::parse_sql(&dialect, large_statement.as_str())
|
||||
.unwrap()
|
||||
.pop()
|
||||
.unwrap();
|
||||
|
||||
group.bench_function("format_large_statement", |b| {
|
||||
b.iter(|| {
|
||||
let _formatted_query = large_statement.to_string();
|
||||
});
|
||||
b.iter(|| Parser::parse_sql(&dialect, with_query));
|
||||
});
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
Before Width: | Height: | Size: 1.5 MiB |
1053
src/ast/data_type.rs
1053
src/ast/data_type.rs
File diff suppressed because it is too large
Load diff
254
src/ast/dcl.rs
254
src/ast/dcl.rs
|
@ -1,254 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! AST types specific to GRANT/REVOKE/ROLE variants of [`Statement`](crate::ast::Statement)
|
||||
//! (commonly referred to as Data Control Language, or DCL)
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
use super::{display_comma_separated, Expr, Ident, Password};
|
||||
use crate::ast::{display_separated, ObjectName};
|
||||
|
||||
/// An option in `ROLE` statement.
|
||||
///
|
||||
/// <https://www.postgresql.org/docs/current/sql-createrole.html>
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum RoleOption {
|
||||
BypassRLS(bool),
|
||||
ConnectionLimit(Expr),
|
||||
CreateDB(bool),
|
||||
CreateRole(bool),
|
||||
Inherit(bool),
|
||||
Login(bool),
|
||||
Password(Password),
|
||||
Replication(bool),
|
||||
SuperUser(bool),
|
||||
ValidUntil(Expr),
|
||||
}
|
||||
|
||||
impl fmt::Display for RoleOption {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
RoleOption::BypassRLS(value) => {
|
||||
write!(f, "{}", if *value { "BYPASSRLS" } else { "NOBYPASSRLS" })
|
||||
}
|
||||
RoleOption::ConnectionLimit(expr) => {
|
||||
write!(f, "CONNECTION LIMIT {expr}")
|
||||
}
|
||||
RoleOption::CreateDB(value) => {
|
||||
write!(f, "{}", if *value { "CREATEDB" } else { "NOCREATEDB" })
|
||||
}
|
||||
RoleOption::CreateRole(value) => {
|
||||
write!(f, "{}", if *value { "CREATEROLE" } else { "NOCREATEROLE" })
|
||||
}
|
||||
RoleOption::Inherit(value) => {
|
||||
write!(f, "{}", if *value { "INHERIT" } else { "NOINHERIT" })
|
||||
}
|
||||
RoleOption::Login(value) => {
|
||||
write!(f, "{}", if *value { "LOGIN" } else { "NOLOGIN" })
|
||||
}
|
||||
RoleOption::Password(password) => match password {
|
||||
Password::Password(expr) => write!(f, "PASSWORD {expr}"),
|
||||
Password::NullPassword => write!(f, "PASSWORD NULL"),
|
||||
},
|
||||
RoleOption::Replication(value) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
if *value {
|
||||
"REPLICATION"
|
||||
} else {
|
||||
"NOREPLICATION"
|
||||
}
|
||||
)
|
||||
}
|
||||
RoleOption::SuperUser(value) => {
|
||||
write!(f, "{}", if *value { "SUPERUSER" } else { "NOSUPERUSER" })
|
||||
}
|
||||
RoleOption::ValidUntil(expr) => {
|
||||
write!(f, "VALID UNTIL {expr}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// SET config value option:
/// * SET `configuration_parameter` { TO | = } { `value` | DEFAULT }
/// * SET `configuration_parameter` FROM CURRENT
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum SetConfigValue {
    /// `SET <param> TO DEFAULT`
    Default,
    /// `SET <param> FROM CURRENT`
    FromCurrent,
    /// `SET <param> TO <expr>`
    Value(Expr),
}
|
||||
|
||||
/// RESET config option:
/// * RESET `configuration_parameter`
/// * RESET ALL
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum ResetConfig {
    /// `RESET ALL`
    ALL,
    /// `RESET <param>`
    ConfigName(ObjectName),
}
|
||||
|
||||
/// An `ALTER ROLE` (`Statement::AlterRole`) operation
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum AlterRoleOperation {
    /// Generic
    RenameRole {
        role_name: Ident,
    },
    /// MS SQL Server
    /// <https://learn.microsoft.com/en-us/sql/t-sql/statements/alter-role-transact-sql>
    AddMember {
        member_name: Ident,
    },
    DropMember {
        member_name: Ident,
    },
    /// PostgreSQL
    /// <https://www.postgresql.org/docs/current/sql-alterrole.html>
    WithOptions {
        options: Vec<RoleOption>,
    },
    /// `[IN DATABASE <db>] SET <param> { TO DEFAULT | FROM CURRENT | TO <value> }`
    Set {
        config_name: ObjectName,
        config_value: SetConfigValue,
        // Optional `IN DATABASE` qualifier, rendered before `SET`.
        in_database: Option<ObjectName>,
    },
    /// `[IN DATABASE <db>] RESET { ALL | <param> }`
    Reset {
        config_name: ResetConfig,
        // Optional `IN DATABASE` qualifier, rendered before `RESET`.
        in_database: Option<ObjectName>,
    },
}
|
||||
|
||||
impl fmt::Display for AlterRoleOperation {
    /// Renders the operation body that follows `ALTER ROLE <name> `.
    /// For `Set`/`Reset`, the optional `IN DATABASE <db> ` qualifier is
    /// emitted first, followed by the `SET`/`RESET` clause.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            AlterRoleOperation::RenameRole { role_name } => {
                write!(f, "RENAME TO {role_name}")
            }
            AlterRoleOperation::AddMember { member_name } => {
                write!(f, "ADD MEMBER {member_name}")
            }
            AlterRoleOperation::DropMember { member_name } => {
                write!(f, "DROP MEMBER {member_name}")
            }
            AlterRoleOperation::WithOptions { options } => {
                // Options are space-separated, e.g. `WITH LOGIN CREATEDB`.
                write!(f, "WITH {}", display_separated(options, " "))
            }
            AlterRoleOperation::Set {
                config_name,
                config_value,
                in_database,
            } => {
                // Trailing space is part of the qualifier so the `SET`
                // keyword follows immediately after.
                if let Some(database_name) = in_database {
                    write!(f, "IN DATABASE {database_name} ")?;
                }

                match config_value {
                    SetConfigValue::Default => write!(f, "SET {config_name} TO DEFAULT"),
                    SetConfigValue::FromCurrent => write!(f, "SET {config_name} FROM CURRENT"),
                    SetConfigValue::Value(expr) => write!(f, "SET {config_name} TO {expr}"),
                }
            }
            AlterRoleOperation::Reset {
                config_name,
                in_database,
            } => {
                if let Some(database_name) = in_database {
                    write!(f, "IN DATABASE {database_name} ")?;
                }

                match config_name {
                    ResetConfig::ALL => write!(f, "RESET ALL"),
                    ResetConfig::ConfigName(name) => write!(f, "RESET {name}"),
                }
            }
        }
    }
}
|
||||
|
||||
/// A `USE` (`Statement::Use`) operation
///
/// Each variant corresponds to a different object kind that can follow the
/// `USE` keyword; `Object` is the bare form with no kind keyword.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum Use {
    Catalog(ObjectName),            // e.g. `USE CATALOG foo.bar`
    Schema(ObjectName),             // e.g. `USE SCHEMA foo.bar`
    Database(ObjectName),           // e.g. `USE DATABASE foo.bar`
    Warehouse(ObjectName),          // e.g. `USE WAREHOUSE foo.bar`
    Role(ObjectName),               // e.g. `USE ROLE PUBLIC`
    SecondaryRoles(SecondaryRoles), // e.g. `USE SECONDARY ROLES ALL`
    Object(ObjectName),             // e.g. `USE foo.bar`
    Default,                        // e.g. `USE DEFAULT`
}
|
||||
|
||||
impl fmt::Display for Use {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str("USE ")?;
|
||||
match self {
|
||||
Use::Catalog(name) => write!(f, "CATALOG {name}"),
|
||||
Use::Schema(name) => write!(f, "SCHEMA {name}"),
|
||||
Use::Database(name) => write!(f, "DATABASE {name}"),
|
||||
Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
|
||||
Use::Role(name) => write!(f, "ROLE {name}"),
|
||||
Use::SecondaryRoles(secondary_roles) => {
|
||||
write!(f, "SECONDARY ROLES {secondary_roles}")
|
||||
}
|
||||
Use::Object(name) => write!(f, "{name}"),
|
||||
Use::Default => write!(f, "DEFAULT"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Snowflake `SECONDARY ROLES` USE variant
/// See: <https://docs.snowflake.com/en/sql-reference/sql/use-secondary-roles>
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum SecondaryRoles {
    /// `SECONDARY ROLES ALL`
    All,
    /// `SECONDARY ROLES NONE`
    None,
    /// `SECONDARY ROLES <role> [, ...]`
    List(Vec<Ident>),
}
|
||||
|
||||
impl fmt::Display for SecondaryRoles {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
SecondaryRoles::All => write!(f, "ALL"),
|
||||
SecondaryRoles::None => write!(f, "NONE"),
|
||||
SecondaryRoles::List(roles) => write!(f, "{}", display_comma_separated(roles)),
|
||||
}
|
||||
}
|
||||
}
|
2263
src/ast/ddl.rs
2263
src/ast/ddl.rs
File diff suppressed because it is too large
Load diff
709
src/ast/dml.rs
709
src/ast/dml.rs
|
@ -1,709 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{
|
||||
boxed::Box,
|
||||
format,
|
||||
string::{String, ToString},
|
||||
vec::Vec,
|
||||
};
|
||||
|
||||
use core::fmt::{self, Display};
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
use crate::display_utils::{indented_list, DisplayCommaSeparated, Indent, NewLine, SpaceOrNewline};
|
||||
|
||||
pub use super::ddl::{ColumnDef, TableConstraint};
|
||||
|
||||
use super::{
|
||||
display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy,
|
||||
CommentDef, CreateTableOptions, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat,
|
||||
HiveIOFormat, HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName,
|
||||
OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem,
|
||||
Setting, SqliteOnConflict, StorageSerializationPolicy, TableObject, TableWithJoins, Tag,
|
||||
WrappedCollection,
|
||||
};
|
||||
|
||||
/// Index column type.
///
/// A column expression in a `CREATE INDEX` column list, optionally followed
/// by an operator class identifier.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct IndexColumn {
    /// The column expression, including any ordering (ASC/DESC etc.).
    pub column: OrderByExpr,
    /// Optional operator class, rendered after the column separated by a space.
    pub operator_class: Option<Ident>,
}
|
||||
|
||||
impl Display for IndexColumn {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.column)?;
|
||||
if let Some(operator_class) = &self.operator_class {
|
||||
write!(f, " {operator_class}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// CREATE INDEX statement.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct CreateIndex {
    /// index name
    pub name: Option<ObjectName>,
    #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
    pub table_name: ObjectName,
    /// Index method, rendered as `USING <index_type>` after the table name.
    pub using: Option<IndexType>,
    pub columns: Vec<IndexColumn>,
    /// `UNIQUE` flag
    pub unique: bool,
    /// PostgreSQL `CONCURRENTLY` flag
    pub concurrently: bool,
    pub if_not_exists: bool,
    /// `INCLUDE (...)` covering-column list; empty when absent.
    pub include: Vec<Ident>,
    /// `NULLS [NOT] DISTINCT`; `None` when the clause is not specified.
    pub nulls_distinct: Option<bool>,
    /// WITH clause: <https://www.postgresql.org/docs/current/sql-createindex.html>
    pub with: Vec<Expr>,
    /// Partial-index predicate: `WHERE <expr>`
    pub predicate: Option<Expr>,
}
|
||||
|
||||
impl Display for CreateIndex {
    /// Renders `CREATE [UNIQUE] INDEX [CONCURRENTLY] [IF NOT EXISTS] [name]
    /// ON table [USING method] (columns) [INCLUDE ...] [NULLS [NOT] DISTINCT]
    /// [WITH (...)] [WHERE predicate]`.
    ///
    /// Spacing is carried inside the format strings (trailing spaces in the
    /// flag fragments and in ` USING {value} `), so optional parts collapse
    /// cleanly when absent.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "CREATE {unique}INDEX {concurrently}{if_not_exists}",
            unique = if self.unique { "UNIQUE " } else { "" },
            concurrently = if self.concurrently {
                "CONCURRENTLY "
            } else {
                ""
            },
            if_not_exists = if self.if_not_exists {
                "IF NOT EXISTS "
            } else {
                ""
            },
        )?;
        if let Some(value) = &self.name {
            write!(f, "{value} ")?;
        }
        write!(f, "ON {}", self.table_name)?;
        if let Some(value) = &self.using {
            // Note the trailing space: the column list follows immediately.
            write!(f, " USING {value} ")?;
        }
        write!(f, "({})", display_separated(&self.columns, ","))?;
        if !self.include.is_empty() {
            write!(f, " INCLUDE ({})", display_separated(&self.include, ","))?;
        }
        if let Some(value) = self.nulls_distinct {
            if value {
                write!(f, " NULLS DISTINCT")?;
            } else {
                write!(f, " NULLS NOT DISTINCT")?;
            }
        }
        if !self.with.is_empty() {
            write!(f, " WITH ({})", display_comma_separated(&self.with))?;
        }
        if let Some(predicate) = &self.predicate {
            write!(f, " WHERE {predicate}")?;
        }
        Ok(())
    }
}
|
||||
|
||||
/// CREATE TABLE statement.
///
/// Covers the common core plus many dialect-specific clauses (Hive,
/// ClickHouse, BigQuery, Snowflake, SQLite, PostgreSQL); see per-field docs.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct CreateTable {
    /// `OR REPLACE`
    pub or_replace: bool,
    /// `TEMPORARY`
    pub temporary: bool,
    /// `EXTERNAL`
    pub external: bool,
    /// `GLOBAL` (`Some(true)`) / `LOCAL` (`Some(false)`); omitted when `None`.
    pub global: Option<bool>,
    pub if_not_exists: bool,
    /// `TRANSIENT`
    pub transient: bool,
    /// `VOLATILE`
    pub volatile: bool,
    /// `ICEBERG` (Snowflake Iceberg tables)
    pub iceberg: bool,
    /// Table name
    #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
    pub name: ObjectName,
    /// Optional schema
    pub columns: Vec<ColumnDef>,
    pub constraints: Vec<TableConstraint>,
    pub hive_distribution: HiveDistributionStyle,
    pub hive_formats: Option<HiveFormat>,
    pub table_options: CreateTableOptions,
    pub file_format: Option<FileFormat>,
    pub location: Option<String>,
    /// `AS <query>` source for CTAS-style statements.
    pub query: Option<Box<Query>>,
    pub without_rowid: bool,
    pub like: Option<ObjectName>,
    pub clone: Option<ObjectName>,
    // For Hive dialect, the table comment is after the column definitions without `=`,
    // so the `comment` field is optional and different than the comment field in the general options list.
    // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable)
    pub comment: Option<CommentDef>,
    pub on_commit: Option<OnCommit>,
    /// ClickHouse "ON CLUSTER" clause:
    /// <https://clickhouse.com/docs/en/sql-reference/distributed-ddl/>
    pub on_cluster: Option<Ident>,
    /// ClickHouse "PRIMARY KEY " clause.
    /// <https://clickhouse.com/docs/en/sql-reference/statements/create/table/>
    pub primary_key: Option<Box<Expr>>,
    /// ClickHouse "ORDER BY " clause. Note that omitted ORDER BY is different
    /// than empty (represented as ()), the latter meaning "no sorting".
    /// <https://clickhouse.com/docs/en/sql-reference/statements/create/table/>
    pub order_by: Option<OneOrManyWithParens<Expr>>,
    /// BigQuery: A partition expression for the table.
    /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#partition_expression>
    pub partition_by: Option<Box<Expr>>,
    /// BigQuery: Table clustering column list.
    /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#table_option_list>
    /// Snowflake: Table clustering list which contains base column, expressions on base columns.
    /// <https://docs.snowflake.com/en/user-guide/tables-clustering-keys#defining-a-clustering-key-for-a-table>
    pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
    /// Hive: Table clustering column list.
    /// <https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable>
    pub clustered_by: Option<ClusteredBy>,
    /// Postgres `INHERITs` clause, which contains the list of tables from which
    /// the new table inherits.
    /// <https://www.postgresql.org/docs/current/ddl-inherit.html>
    /// <https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-INHERITS>
    pub inherits: Option<Vec<ObjectName>>,
    /// SQLite "STRICT" clause.
    /// if the "STRICT" table-option keyword is added to the end, after the closing ")",
    /// then strict typing rules apply to that table.
    pub strict: bool,
    /// Snowflake "COPY GRANTS" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub copy_grants: bool,
    /// Snowflake "ENABLE_SCHEMA_EVOLUTION" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub enable_schema_evolution: Option<bool>,
    /// Snowflake "CHANGE_TRACKING" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub change_tracking: Option<bool>,
    /// Snowflake "DATA_RETENTION_TIME_IN_DAYS" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub data_retention_time_in_days: Option<u64>,
    /// Snowflake "MAX_DATA_EXTENSION_TIME_IN_DAYS" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub max_data_extension_time_in_days: Option<u64>,
    /// Snowflake "DEFAULT_DDL_COLLATION" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub default_ddl_collation: Option<String>,
    /// Snowflake "WITH AGGREGATION POLICY" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub with_aggregation_policy: Option<ObjectName>,
    /// Snowflake "WITH ROW ACCESS POLICY" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub with_row_access_policy: Option<RowAccessPolicy>,
    /// Snowflake "WITH TAG" clause
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
    pub with_tags: Option<Vec<Tag>>,
    /// Snowflake "EXTERNAL_VOLUME" clause for Iceberg tables
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
    pub external_volume: Option<String>,
    /// Snowflake "BASE_LOCATION" clause for Iceberg tables
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
    pub base_location: Option<String>,
    /// Snowflake "CATALOG" clause for Iceberg tables
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
    pub catalog: Option<String>,
    /// Snowflake "CATALOG_SYNC" clause for Iceberg tables
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
    pub catalog_sync: Option<String>,
    /// Snowflake "STORAGE_SERIALIZATION_POLICY" clause for Iceberg tables
    /// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
    pub storage_serialization_policy: Option<StorageSerializationPolicy>,
}
|
||||
|
||||
impl Display for CreateTable {
    /// Renders the full `CREATE TABLE` statement, emitting dialect-specific
    /// clauses only when the corresponding field is set. Clause order is
    /// significant and matches what the parsers in this crate accept.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // We want to allow the following options
        // Empty column list, allowed by PostgreSQL:
        // `CREATE TABLE t ()`
        // No columns provided for CREATE TABLE AS:
        // `CREATE TABLE t AS SELECT a from t2`
        // Columns provided for CREATE TABLE AS:
        // `CREATE TABLE t (a INT) AS SELECT a from t2`
        write!(
            f,
            "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}{iceberg}TABLE {if_not_exists}{name}",
            or_replace = if self.or_replace { "OR REPLACE " } else { "" },
            external = if self.external { "EXTERNAL " } else { "" },
            global = self.global
                .map(|global| {
                    if global {
                        "GLOBAL "
                    } else {
                        "LOCAL "
                    }
                })
                .unwrap_or(""),
            if_not_exists = if self.if_not_exists { "IF NOT EXISTS " } else { "" },
            temporary = if self.temporary { "TEMPORARY " } else { "" },
            transient = if self.transient { "TRANSIENT " } else { "" },
            volatile = if self.volatile { "VOLATILE " } else { "" },
            // Only for Snowflake
            iceberg = if self.iceberg { "ICEBERG " } else { "" },
            name = self.name,
        )?;
        if let Some(on_cluster) = &self.on_cluster {
            write!(f, " ON CLUSTER {on_cluster}")?;
        }
        // Column/constraint list, pretty-printed with newlines and indent.
        if !self.columns.is_empty() || !self.constraints.is_empty() {
            f.write_str(" (")?;
            NewLine.fmt(f)?;
            Indent(DisplayCommaSeparated(&self.columns)).fmt(f)?;
            if !self.columns.is_empty() && !self.constraints.is_empty() {
                f.write_str(",")?;
                SpaceOrNewline.fmt(f)?;
            }
            Indent(DisplayCommaSeparated(&self.constraints)).fmt(f)?;
            NewLine.fmt(f)?;
            f.write_str(")")?;
        } else if self.query.is_none() && self.like.is_none() && self.clone.is_none() {
            // PostgreSQL allows `CREATE TABLE t ();`, but requires empty parens
            f.write_str(" ()")?;
        }

        // Hive table comment should be after column definitions, please refer to:
        // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable)
        if let Some(comment) = &self.comment {
            write!(f, " COMMENT '{comment}'")?;
        }

        // Only for SQLite
        if self.without_rowid {
            write!(f, " WITHOUT ROWID")?;
        }

        // Only for Hive
        if let Some(l) = &self.like {
            write!(f, " LIKE {l}")?;
        }

        if let Some(c) = &self.clone {
            write!(f, " CLONE {c}")?;
        }

        match &self.hive_distribution {
            HiveDistributionStyle::PARTITIONED { columns } => {
                write!(f, " PARTITIONED BY ({})", display_comma_separated(columns))?;
            }
            HiveDistributionStyle::SKEWED {
                columns,
                on,
                stored_as_directories,
            } => {
                // NOTE(review): the format string below emits an unbalanced
                // extra ")" after the column list (`({}))`), producing e.g.
                // `SKEWED BY (a, b)) ON (...)`. Looks like a typo — confirm
                // against the round-trip parser tests before changing.
                write!(
                    f,
                    " SKEWED BY ({})) ON ({})",
                    display_comma_separated(columns),
                    display_comma_separated(on)
                )?;
                if *stored_as_directories {
                    write!(f, " STORED AS DIRECTORIES")?;
                }
            }
            _ => (),
        }

        if let Some(clustered_by) = &self.clustered_by {
            write!(f, " {clustered_by}")?;
        }

        // Hive row format / storage / SERDE properties / location.
        if let Some(HiveFormat {
            row_format,
            serde_properties,
            storage,
            location,
        }) = &self.hive_formats
        {
            match row_format {
                Some(HiveRowFormat::SERDE { class }) => write!(f, " ROW FORMAT SERDE '{class}'")?,
                Some(HiveRowFormat::DELIMITED { delimiters }) => {
                    write!(f, " ROW FORMAT DELIMITED")?;
                    if !delimiters.is_empty() {
                        write!(f, " {}", display_separated(delimiters, " "))?;
                    }
                }
                None => (),
            }
            match storage {
                Some(HiveIOFormat::IOF {
                    input_format,
                    output_format,
                }) => write!(
                    f,
                    " STORED AS INPUTFORMAT {input_format} OUTPUTFORMAT {output_format}"
                )?,
                // External tables get their STORED AS/LOCATION from the
                // `external` branch below instead.
                Some(HiveIOFormat::FileFormat { format }) if !self.external => {
                    write!(f, " STORED AS {format}")?
                }
                _ => (),
            }
            if let Some(serde_properties) = serde_properties.as_ref() {
                write!(
                    f,
                    " WITH SERDEPROPERTIES ({})",
                    display_comma_separated(serde_properties)
                )?;
            }
            if !self.external {
                if let Some(loc) = location {
                    write!(f, " LOCATION '{loc}'")?;
                }
            }
        }
        if self.external {
            if let Some(file_format) = self.file_format {
                write!(f, " STORED AS {file_format}")?;
            }
            // NOTE(review): unwrap() panics if `location` is None for an
            // external table — presumably the parser guarantees it; verify.
            write!(f, " LOCATION '{}'", self.location.as_ref().unwrap())?;
        }

        // `Options(..)` variant is deliberately emitted later, after
        // CLUSTER BY — see below.
        match &self.table_options {
            options @ CreateTableOptions::With(_)
            | options @ CreateTableOptions::Plain(_)
            | options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
            _ => (),
        }

        if let Some(primary_key) = &self.primary_key {
            write!(f, " PRIMARY KEY {primary_key}")?;
        }
        if let Some(order_by) = &self.order_by {
            write!(f, " ORDER BY {order_by}")?;
        }
        if let Some(inherits) = &self.inherits {
            write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
        }
        if let Some(partition_by) = self.partition_by.as_ref() {
            write!(f, " PARTITION BY {partition_by}")?;
        }
        if let Some(cluster_by) = self.cluster_by.as_ref() {
            write!(f, " CLUSTER BY {cluster_by}")?;
        }
        if let options @ CreateTableOptions::Options(_) = &self.table_options {
            write!(f, " {options}")?;
        }
        if let Some(external_volume) = self.external_volume.as_ref() {
            write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;
        }

        if let Some(catalog) = self.catalog.as_ref() {
            write!(f, " CATALOG = '{catalog}'")?;
        }

        // BASE_LOCATION only applies to Snowflake Iceberg tables.
        if self.iceberg {
            if let Some(base_location) = self.base_location.as_ref() {
                write!(f, " BASE_LOCATION = '{base_location}'")?;
            }
        }

        if let Some(catalog_sync) = self.catalog_sync.as_ref() {
            write!(f, " CATALOG_SYNC = '{catalog_sync}'")?;
        }

        if let Some(storage_serialization_policy) = self.storage_serialization_policy.as_ref() {
            write!(
                f,
                " STORAGE_SERIALIZATION_POLICY = {storage_serialization_policy}"
            )?;
        }

        if self.copy_grants {
            write!(f, " COPY GRANTS")?;
        }

        if let Some(is_enabled) = self.enable_schema_evolution {
            write!(
                f,
                " ENABLE_SCHEMA_EVOLUTION={}",
                if is_enabled { "TRUE" } else { "FALSE" }
            )?;
        }

        if let Some(is_enabled) = self.change_tracking {
            write!(
                f,
                " CHANGE_TRACKING={}",
                if is_enabled { "TRUE" } else { "FALSE" }
            )?;
        }

        if let Some(data_retention_time_in_days) = self.data_retention_time_in_days {
            write!(
                f,
                " DATA_RETENTION_TIME_IN_DAYS={data_retention_time_in_days}",
            )?;
        }

        if let Some(max_data_extension_time_in_days) = self.max_data_extension_time_in_days {
            write!(
                f,
                " MAX_DATA_EXTENSION_TIME_IN_DAYS={max_data_extension_time_in_days}",
            )?;
        }

        if let Some(default_ddl_collation) = &self.default_ddl_collation {
            write!(f, " DEFAULT_DDL_COLLATION='{default_ddl_collation}'",)?;
        }

        if let Some(with_aggregation_policy) = &self.with_aggregation_policy {
            write!(f, " WITH AGGREGATION POLICY {with_aggregation_policy}",)?;
        }

        if let Some(row_access_policy) = &self.with_row_access_policy {
            write!(f, " {row_access_policy}",)?;
        }

        if let Some(tag) = &self.with_tags {
            write!(f, " WITH TAG ({})", display_comma_separated(tag.as_slice()))?;
        }

        if self.on_commit.is_some() {
            let on_commit = match self.on_commit {
                Some(OnCommit::DeleteRows) => "ON COMMIT DELETE ROWS",
                Some(OnCommit::PreserveRows) => "ON COMMIT PRESERVE ROWS",
                Some(OnCommit::Drop) => "ON COMMIT DROP",
                None => "",
            };
            write!(f, " {on_commit}")?;
        }
        if self.strict {
            write!(f, " STRICT")?;
        }
        if let Some(query) = &self.query {
            write!(f, " AS {query}")?;
        }
        Ok(())
    }
}
|
||||
|
||||
/// INSERT statement.
///
/// Also models MySQL `REPLACE INTO` (via `replace_into`) and dialect
/// extensions from SQLite, Hive, MySQL, and ClickHouse; see per-field docs.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Insert {
    /// Only for Sqlite
    pub or: Option<SqliteOnConflict>,
    /// Only for mysql
    pub ignore: bool,
    /// INTO - optional keyword
    pub into: bool,
    /// TABLE
    pub table: TableObject,
    /// table_name as foo (for PostgreSQL)
    pub table_alias: Option<Ident>,
    /// COLUMNS
    pub columns: Vec<Ident>,
    /// Overwrite (Hive)
    pub overwrite: bool,
    /// A SQL query that specifies what to insert
    pub source: Option<Box<Query>>,
    /// MySQL `INSERT INTO ... SET`
    /// See: <https://dev.mysql.com/doc/refman/8.4/en/insert.html>
    pub assignments: Vec<Assignment>,
    /// partitioned insert (Hive)
    pub partitioned: Option<Vec<Expr>>,
    /// Columns defined after PARTITION
    pub after_columns: Vec<Ident>,
    /// whether the insert has the table keyword (Hive)
    pub has_table_keyword: bool,
    /// `ON CONFLICT` / `ON DUPLICATE KEY` clause
    pub on: Option<OnInsert>,
    /// RETURNING
    pub returning: Option<Vec<SelectItem>>,
    /// Only for mysql
    pub replace_into: bool,
    /// Only for mysql
    pub priority: Option<MysqlInsertPriority>,
    /// Only for mysql
    pub insert_alias: Option<InsertAliases>,
    /// Settings used for ClickHouse.
    ///
    /// ClickHouse syntax: `INSERT INTO tbl SETTINGS format_template_resultset = '/some/path/resultset.format'`
    ///
    /// [ClickHouse `INSERT INTO`](https://clickhouse.com/docs/en/sql-reference/statements/insert-into)
    pub settings: Option<Vec<Setting>>,
    /// Format for `INSERT` statement when not using standard SQL format. Can be e.g. `CSV`,
    /// `JSON`, `JSONAsString`, `LineAsString` and more.
    ///
    /// ClickHouse syntax: `INSERT INTO tbl FORMAT JSONEachRow {"foo": 1, "bar": 2}, {"foo": 3}`
    ///
    /// [ClickHouse formats JSON insert](https://clickhouse.com/docs/en/interfaces/formats#json-inserting-data)
    pub format_clause: Option<InputFormatClause>,
}
|
||||
|
||||
impl Display for Insert {
    /// Renders the statement: keyword prefix (`INSERT`/`REPLACE` plus MySQL
    /// modifiers), target table (with optional alias), column/partition
    /// lists, then exactly one source form (query, `SET` assignments,
    /// ClickHouse `FORMAT`, or `DEFAULT VALUES`), followed by alias,
    /// `ON ...`, and `RETURNING` clauses.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Pre-render the target so both keyword branches can embed it.
        let table_name = if let Some(alias) = &self.table_alias {
            format!("{0} AS {alias}", self.table)
        } else {
            self.table.to_string()
        };

        if let Some(on_conflict) = self.or {
            // SQLite `INSERT OR <conflict> INTO ...`
            write!(f, "INSERT {on_conflict} INTO {table_name} ")?;
        } else {
            write!(
                f,
                "{start}",
                start = if self.replace_into {
                    "REPLACE"
                } else {
                    "INSERT"
                },
            )?;
            if let Some(priority) = self.priority {
                write!(f, " {priority}",)?;
            }

            // Each optional keyword fragment carries its own leading space;
            // a trailing space separates the table name from what follows.
            write!(
                f,
                "{ignore}{over}{int}{tbl} {table_name} ",
                table_name = table_name,
                ignore = if self.ignore { " IGNORE" } else { "" },
                over = if self.overwrite { " OVERWRITE" } else { "" },
                int = if self.into { " INTO" } else { "" },
                tbl = if self.has_table_keyword { " TABLE" } else { "" },
            )?;
        }
        if !self.columns.is_empty() {
            write!(f, "({})", display_comma_separated(&self.columns))?;
            SpaceOrNewline.fmt(f)?;
        }
        if let Some(ref parts) = self.partitioned {
            if !parts.is_empty() {
                write!(f, "PARTITION ({})", display_comma_separated(parts))?;
                SpaceOrNewline.fmt(f)?;
            }
        }
        if !self.after_columns.is_empty() {
            write!(f, "({})", display_comma_separated(&self.after_columns))?;
            SpaceOrNewline.fmt(f)?;
        }

        if let Some(settings) = &self.settings {
            write!(f, "SETTINGS {}", display_comma_separated(settings))?;
            SpaceOrNewline.fmt(f)?;
        }

        // Exactly one source form is emitted, in this priority order.
        if let Some(source) = &self.source {
            source.fmt(f)?;
        } else if !self.assignments.is_empty() {
            write!(f, "SET")?;
            indented_list(f, &self.assignments)?;
        } else if let Some(format_clause) = &self.format_clause {
            format_clause.fmt(f)?;
        } else if self.columns.is_empty() {
            write!(f, "DEFAULT VALUES")?;
        }

        if let Some(insert_alias) = &self.insert_alias {
            write!(f, " AS {0}", insert_alias.row_alias)?;

            if let Some(col_aliases) = &insert_alias.col_aliases {
                if !col_aliases.is_empty() {
                    write!(f, " ({})", display_comma_separated(col_aliases))?;
                }
            }
        }

        if let Some(on) = &self.on {
            write!(f, "{on}")?;
        }

        if let Some(returning) = &self.returning {
            SpaceOrNewline.fmt(f)?;
            f.write_str("RETURNING")?;
            indented_list(f, returning)?;
        }
        Ok(())
    }
}
|
||||
|
||||
/// DELETE statement.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Delete {
    /// Multi tables delete are supported in mysql
    pub tables: Vec<ObjectName>,
    /// FROM
    pub from: FromTable,
    /// USING (Snowflake, Postgres, MySQL)
    pub using: Option<Vec<TableWithJoins>>,
    /// WHERE
    pub selection: Option<Expr>,
    /// RETURNING
    pub returning: Option<Vec<SelectItem>>,
    /// ORDER BY (MySQL)
    pub order_by: Vec<OrderByExpr>,
    /// LIMIT (MySQL)
    pub limit: Option<Expr>,
}
|
||||
|
||||
impl Display for Delete {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("DELETE")?;
|
||||
if !self.tables.is_empty() {
|
||||
indented_list(f, &self.tables)?;
|
||||
}
|
||||
match &self.from {
|
||||
FromTable::WithFromKeyword(from) => {
|
||||
f.write_str(" FROM")?;
|
||||
indented_list(f, from)?;
|
||||
}
|
||||
FromTable::WithoutKeyword(from) => {
|
||||
indented_list(f, from)?;
|
||||
}
|
||||
}
|
||||
if let Some(using) = &self.using {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("USING")?;
|
||||
indented_list(f, using)?;
|
||||
}
|
||||
if let Some(selection) = &self.selection {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("WHERE")?;
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
Indent(selection).fmt(f)?;
|
||||
}
|
||||
if let Some(returning) = &self.returning {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("RETURNING")?;
|
||||
indented_list(f, returning)?;
|
||||
}
|
||||
if !self.order_by.is_empty() {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("ORDER BY")?;
|
||||
indented_list(f, &self.order_by)?;
|
||||
}
|
||||
if let Some(limit) = &self.limit {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("LIMIT")?;
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
Indent(limit).fmt(f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,136 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
|
||||
use core::fmt::{self, Debug, Formatter};
|
||||
use core::hash::{Hash, Hasher};
|
||||
|
||||
use crate::tokenizer::TokenWithSpan;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
/// A wrapper over [`TokenWithSpan`]s that ignores the token and source
|
||||
/// location in comparisons and hashing.
|
||||
///
|
||||
/// This type is used when the token and location is not relevant for semantics,
|
||||
/// but is still needed for accurate source location tracking, for example, in
|
||||
/// the nodes in the [ast](crate::ast) module.
|
||||
///
|
||||
/// Note: **All** `AttachedTokens` are equal.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Same token, different location are equal
|
||||
/// ```
|
||||
/// # use sqlparser::ast::helpers::attached_token::AttachedToken;
|
||||
/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation};
|
||||
/// // commas @ line 1, column 10
|
||||
/// let tok1 = TokenWithLocation::new(
|
||||
/// Token::Comma,
|
||||
/// Span::new(Location::new(1, 10), Location::new(1, 11)),
|
||||
/// );
|
||||
/// // commas @ line 2, column 20
|
||||
/// let tok2 = TokenWithLocation::new(
|
||||
/// Token::Comma,
|
||||
/// Span::new(Location::new(2, 20), Location::new(2, 21)),
|
||||
/// );
|
||||
///
|
||||
/// assert_ne!(tok1, tok2); // token with locations are *not* equal
|
||||
/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are
|
||||
/// ```
|
||||
///
|
||||
/// Different token, different location are equal 🤯
|
||||
///
|
||||
/// ```
|
||||
/// # use sqlparser::ast::helpers::attached_token::AttachedToken;
|
||||
/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation};
|
||||
/// // commas @ line 1, column 10
|
||||
/// let tok1 = TokenWithLocation::new(
|
||||
/// Token::Comma,
|
||||
/// Span::new(Location::new(1, 10), Location::new(1, 11)),
|
||||
/// );
|
||||
/// // period @ line 2, column 20
|
||||
/// let tok2 = TokenWithLocation::new(
|
||||
/// Token::Period,
|
||||
/// Span::new(Location::new(2, 10), Location::new(2, 21)),
|
||||
/// );
|
||||
///
|
||||
/// assert_ne!(tok1, tok2); // token with locations are *not* equal
|
||||
/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are
|
||||
/// ```
|
||||
/// // period @ line 2, column 20
|
||||
#[derive(Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct AttachedToken(pub TokenWithSpan);
|
||||
|
||||
impl AttachedToken {
|
||||
/// Return a new Empty AttachedToken
|
||||
pub fn empty() -> Self {
|
||||
AttachedToken(TokenWithSpan::new_eof())
|
||||
}
|
||||
}
|
||||
|
||||
// Conditional Implementations
|
||||
impl Debug for AttachedToken {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
// Blanket Implementations
|
||||
impl PartialEq for AttachedToken {
|
||||
fn eq(&self, _: &Self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AttachedToken {}
|
||||
|
||||
impl PartialOrd for AttachedToken {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for AttachedToken {
|
||||
fn cmp(&self, _: &Self) -> Ordering {
|
||||
Ordering::Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for AttachedToken {
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TokenWithSpan> for AttachedToken {
|
||||
fn from(value: TokenWithSpan) -> Self {
|
||||
AttachedToken(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AttachedToken> for TokenWithSpan {
|
||||
fn from(value: AttachedToken) -> Self {
|
||||
value.0
|
||||
}
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! Key-value options for SQL statements.
|
||||
//! See [this page](https://docs.snowflake.com/en/sql-reference/commands-data-loading) for more details.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::string::String;
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::vec::Vec;
|
||||
use core::fmt;
|
||||
use core::fmt::Formatter;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct KeyValueOptions {
|
||||
pub options: Vec<KeyValueOption>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum KeyValueOptionType {
|
||||
STRING,
|
||||
BOOLEAN,
|
||||
ENUM,
|
||||
NUMBER,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct KeyValueOption {
|
||||
pub option_name: String,
|
||||
pub option_type: KeyValueOptionType,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for KeyValueOptions {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
if !self.options.is_empty() {
|
||||
let mut first = false;
|
||||
for option in &self.options {
|
||||
if !first {
|
||||
first = true;
|
||||
} else {
|
||||
f.write_str(" ")?;
|
||||
}
|
||||
write!(f, "{option}")?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for KeyValueOption {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self.option_type {
|
||||
KeyValueOptionType::STRING => {
|
||||
write!(f, "{}='{}'", self.option_name, self.value)?;
|
||||
}
|
||||
KeyValueOptionType::ENUM | KeyValueOptionType::BOOLEAN | KeyValueOptionType::NUMBER => {
|
||||
write!(f, "{}={}", self.option_name, self.value)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
pub mod attached_token;
|
||||
pub mod key_value_options;
|
||||
pub mod stmt_create_table;
|
||||
pub mod stmt_data_loading;
|
|
@ -1,580 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{boxed::Box, format, string::String, vec, vec::Vec};
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
use super::super::dml::CreateTable;
|
||||
use crate::ast::{
|
||||
ClusteredBy, ColumnDef, CommentDef, CreateTableOptions, Expr, FileFormat,
|
||||
HiveDistributionStyle, HiveFormat, Ident, ObjectName, OnCommit, OneOrManyWithParens, Query,
|
||||
RowAccessPolicy, Statement, StorageSerializationPolicy, TableConstraint, Tag,
|
||||
WrappedCollection,
|
||||
};
|
||||
|
||||
use crate::parser::ParserError;
|
||||
|
||||
/// Builder for create table statement variant ([1]).
|
||||
///
|
||||
/// This structure helps building and accessing a create table with more ease, without needing to:
|
||||
/// - Match the enum itself a lot of times; or
|
||||
/// - Moving a lot of variables around the code.
|
||||
///
|
||||
/// # Example
|
||||
/// ```rust
|
||||
/// use sqlparser::ast::helpers::stmt_create_table::CreateTableBuilder;
|
||||
/// use sqlparser::ast::{ColumnDef, DataType, Ident, ObjectName};
|
||||
/// let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")]))
|
||||
/// .if_not_exists(true)
|
||||
/// .columns(vec![ColumnDef {
|
||||
/// name: Ident::new("c1"),
|
||||
/// data_type: DataType::Int(None),
|
||||
/// options: vec![],
|
||||
/// }]);
|
||||
/// // You can access internal elements with ease
|
||||
/// assert!(builder.if_not_exists);
|
||||
/// // Convert to a statement
|
||||
/// assert_eq!(
|
||||
/// builder.build().to_string(),
|
||||
/// "CREATE TABLE IF NOT EXISTS table_name (c1 INT)"
|
||||
/// )
|
||||
/// ```
|
||||
///
|
||||
/// [1]: crate::ast::Statement::CreateTable
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct CreateTableBuilder {
|
||||
pub or_replace: bool,
|
||||
pub temporary: bool,
|
||||
pub external: bool,
|
||||
pub global: Option<bool>,
|
||||
pub if_not_exists: bool,
|
||||
pub transient: bool,
|
||||
pub volatile: bool,
|
||||
pub iceberg: bool,
|
||||
pub name: ObjectName,
|
||||
pub columns: Vec<ColumnDef>,
|
||||
pub constraints: Vec<TableConstraint>,
|
||||
pub hive_distribution: HiveDistributionStyle,
|
||||
pub hive_formats: Option<HiveFormat>,
|
||||
pub file_format: Option<FileFormat>,
|
||||
pub location: Option<String>,
|
||||
pub query: Option<Box<Query>>,
|
||||
pub without_rowid: bool,
|
||||
pub like: Option<ObjectName>,
|
||||
pub clone: Option<ObjectName>,
|
||||
pub comment: Option<CommentDef>,
|
||||
pub on_commit: Option<OnCommit>,
|
||||
pub on_cluster: Option<Ident>,
|
||||
pub primary_key: Option<Box<Expr>>,
|
||||
pub order_by: Option<OneOrManyWithParens<Expr>>,
|
||||
pub partition_by: Option<Box<Expr>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
|
||||
pub clustered_by: Option<ClusteredBy>,
|
||||
pub inherits: Option<Vec<ObjectName>>,
|
||||
pub strict: bool,
|
||||
pub copy_grants: bool,
|
||||
pub enable_schema_evolution: Option<bool>,
|
||||
pub change_tracking: Option<bool>,
|
||||
pub data_retention_time_in_days: Option<u64>,
|
||||
pub max_data_extension_time_in_days: Option<u64>,
|
||||
pub default_ddl_collation: Option<String>,
|
||||
pub with_aggregation_policy: Option<ObjectName>,
|
||||
pub with_row_access_policy: Option<RowAccessPolicy>,
|
||||
pub with_tags: Option<Vec<Tag>>,
|
||||
pub base_location: Option<String>,
|
||||
pub external_volume: Option<String>,
|
||||
pub catalog: Option<String>,
|
||||
pub catalog_sync: Option<String>,
|
||||
pub storage_serialization_policy: Option<StorageSerializationPolicy>,
|
||||
pub table_options: CreateTableOptions,
|
||||
}
|
||||
|
||||
impl CreateTableBuilder {
|
||||
pub fn new(name: ObjectName) -> Self {
|
||||
Self {
|
||||
or_replace: false,
|
||||
temporary: false,
|
||||
external: false,
|
||||
global: None,
|
||||
if_not_exists: false,
|
||||
transient: false,
|
||||
volatile: false,
|
||||
iceberg: false,
|
||||
name,
|
||||
columns: vec![],
|
||||
constraints: vec![],
|
||||
hive_distribution: HiveDistributionStyle::NONE,
|
||||
hive_formats: None,
|
||||
file_format: None,
|
||||
location: None,
|
||||
query: None,
|
||||
without_rowid: false,
|
||||
like: None,
|
||||
clone: None,
|
||||
comment: None,
|
||||
on_commit: None,
|
||||
on_cluster: None,
|
||||
primary_key: None,
|
||||
order_by: None,
|
||||
partition_by: None,
|
||||
cluster_by: None,
|
||||
clustered_by: None,
|
||||
inherits: None,
|
||||
strict: false,
|
||||
copy_grants: false,
|
||||
enable_schema_evolution: None,
|
||||
change_tracking: None,
|
||||
data_retention_time_in_days: None,
|
||||
max_data_extension_time_in_days: None,
|
||||
default_ddl_collation: None,
|
||||
with_aggregation_policy: None,
|
||||
with_row_access_policy: None,
|
||||
with_tags: None,
|
||||
base_location: None,
|
||||
external_volume: None,
|
||||
catalog: None,
|
||||
catalog_sync: None,
|
||||
storage_serialization_policy: None,
|
||||
table_options: CreateTableOptions::None,
|
||||
}
|
||||
}
|
||||
pub fn or_replace(mut self, or_replace: bool) -> Self {
|
||||
self.or_replace = or_replace;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn temporary(mut self, temporary: bool) -> Self {
|
||||
self.temporary = temporary;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn external(mut self, external: bool) -> Self {
|
||||
self.external = external;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn global(mut self, global: Option<bool>) -> Self {
|
||||
self.global = global;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn if_not_exists(mut self, if_not_exists: bool) -> Self {
|
||||
self.if_not_exists = if_not_exists;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn transient(mut self, transient: bool) -> Self {
|
||||
self.transient = transient;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn volatile(mut self, volatile: bool) -> Self {
|
||||
self.volatile = volatile;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn iceberg(mut self, iceberg: bool) -> Self {
|
||||
self.iceberg = iceberg;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn columns(mut self, columns: Vec<ColumnDef>) -> Self {
|
||||
self.columns = columns;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn constraints(mut self, constraints: Vec<TableConstraint>) -> Self {
|
||||
self.constraints = constraints;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn hive_distribution(mut self, hive_distribution: HiveDistributionStyle) -> Self {
|
||||
self.hive_distribution = hive_distribution;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn hive_formats(mut self, hive_formats: Option<HiveFormat>) -> Self {
|
||||
self.hive_formats = hive_formats;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn file_format(mut self, file_format: Option<FileFormat>) -> Self {
|
||||
self.file_format = file_format;
|
||||
self
|
||||
}
|
||||
pub fn location(mut self, location: Option<String>) -> Self {
|
||||
self.location = location;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn query(mut self, query: Option<Box<Query>>) -> Self {
|
||||
self.query = query;
|
||||
self
|
||||
}
|
||||
pub fn without_rowid(mut self, without_rowid: bool) -> Self {
|
||||
self.without_rowid = without_rowid;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn like(mut self, like: Option<ObjectName>) -> Self {
|
||||
self.like = like;
|
||||
self
|
||||
}
|
||||
|
||||
// Different name to allow the object to be cloned
|
||||
pub fn clone_clause(mut self, clone: Option<ObjectName>) -> Self {
|
||||
self.clone = clone;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn comment_after_column_def(mut self, comment: Option<CommentDef>) -> Self {
|
||||
self.comment = comment;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_commit(mut self, on_commit: Option<OnCommit>) -> Self {
|
||||
self.on_commit = on_commit;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn on_cluster(mut self, on_cluster: Option<Ident>) -> Self {
|
||||
self.on_cluster = on_cluster;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn primary_key(mut self, primary_key: Option<Box<Expr>>) -> Self {
|
||||
self.primary_key = primary_key;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn order_by(mut self, order_by: Option<OneOrManyWithParens<Expr>>) -> Self {
|
||||
self.order_by = order_by;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn partition_by(mut self, partition_by: Option<Box<Expr>>) -> Self {
|
||||
self.partition_by = partition_by;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn cluster_by(mut self, cluster_by: Option<WrappedCollection<Vec<Expr>>>) -> Self {
|
||||
self.cluster_by = cluster_by;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn clustered_by(mut self, clustered_by: Option<ClusteredBy>) -> Self {
|
||||
self.clustered_by = clustered_by;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn inherits(mut self, inherits: Option<Vec<ObjectName>>) -> Self {
|
||||
self.inherits = inherits;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn strict(mut self, strict: bool) -> Self {
|
||||
self.strict = strict;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn copy_grants(mut self, copy_grants: bool) -> Self {
|
||||
self.copy_grants = copy_grants;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn enable_schema_evolution(mut self, enable_schema_evolution: Option<bool>) -> Self {
|
||||
self.enable_schema_evolution = enable_schema_evolution;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn change_tracking(mut self, change_tracking: Option<bool>) -> Self {
|
||||
self.change_tracking = change_tracking;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn data_retention_time_in_days(mut self, data_retention_time_in_days: Option<u64>) -> Self {
|
||||
self.data_retention_time_in_days = data_retention_time_in_days;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn max_data_extension_time_in_days(
|
||||
mut self,
|
||||
max_data_extension_time_in_days: Option<u64>,
|
||||
) -> Self {
|
||||
self.max_data_extension_time_in_days = max_data_extension_time_in_days;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn default_ddl_collation(mut self, default_ddl_collation: Option<String>) -> Self {
|
||||
self.default_ddl_collation = default_ddl_collation;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_aggregation_policy(mut self, with_aggregation_policy: Option<ObjectName>) -> Self {
|
||||
self.with_aggregation_policy = with_aggregation_policy;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_row_access_policy(
|
||||
mut self,
|
||||
with_row_access_policy: Option<RowAccessPolicy>,
|
||||
) -> Self {
|
||||
self.with_row_access_policy = with_row_access_policy;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_tags(mut self, with_tags: Option<Vec<Tag>>) -> Self {
|
||||
self.with_tags = with_tags;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn base_location(mut self, base_location: Option<String>) -> Self {
|
||||
self.base_location = base_location;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn external_volume(mut self, external_volume: Option<String>) -> Self {
|
||||
self.external_volume = external_volume;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn catalog(mut self, catalog: Option<String>) -> Self {
|
||||
self.catalog = catalog;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn catalog_sync(mut self, catalog_sync: Option<String>) -> Self {
|
||||
self.catalog_sync = catalog_sync;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn storage_serialization_policy(
|
||||
mut self,
|
||||
storage_serialization_policy: Option<StorageSerializationPolicy>,
|
||||
) -> Self {
|
||||
self.storage_serialization_policy = storage_serialization_policy;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn table_options(mut self, table_options: CreateTableOptions) -> Self {
|
||||
self.table_options = table_options;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Statement {
|
||||
Statement::CreateTable(CreateTable {
|
||||
or_replace: self.or_replace,
|
||||
temporary: self.temporary,
|
||||
external: self.external,
|
||||
global: self.global,
|
||||
if_not_exists: self.if_not_exists,
|
||||
transient: self.transient,
|
||||
volatile: self.volatile,
|
||||
iceberg: self.iceberg,
|
||||
name: self.name,
|
||||
columns: self.columns,
|
||||
constraints: self.constraints,
|
||||
hive_distribution: self.hive_distribution,
|
||||
hive_formats: self.hive_formats,
|
||||
file_format: self.file_format,
|
||||
location: self.location,
|
||||
query: self.query,
|
||||
without_rowid: self.without_rowid,
|
||||
like: self.like,
|
||||
clone: self.clone,
|
||||
comment: self.comment,
|
||||
on_commit: self.on_commit,
|
||||
on_cluster: self.on_cluster,
|
||||
primary_key: self.primary_key,
|
||||
order_by: self.order_by,
|
||||
partition_by: self.partition_by,
|
||||
cluster_by: self.cluster_by,
|
||||
clustered_by: self.clustered_by,
|
||||
inherits: self.inherits,
|
||||
strict: self.strict,
|
||||
copy_grants: self.copy_grants,
|
||||
enable_schema_evolution: self.enable_schema_evolution,
|
||||
change_tracking: self.change_tracking,
|
||||
data_retention_time_in_days: self.data_retention_time_in_days,
|
||||
max_data_extension_time_in_days: self.max_data_extension_time_in_days,
|
||||
default_ddl_collation: self.default_ddl_collation,
|
||||
with_aggregation_policy: self.with_aggregation_policy,
|
||||
with_row_access_policy: self.with_row_access_policy,
|
||||
with_tags: self.with_tags,
|
||||
base_location: self.base_location,
|
||||
external_volume: self.external_volume,
|
||||
catalog: self.catalog,
|
||||
catalog_sync: self.catalog_sync,
|
||||
storage_serialization_policy: self.storage_serialization_policy,
|
||||
table_options: self.table_options,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<Statement> for CreateTableBuilder {
|
||||
type Error = ParserError;
|
||||
|
||||
// As the builder can be transformed back to a statement, it shouldn't be a problem to take the
|
||||
// ownership.
|
||||
fn try_from(stmt: Statement) -> Result<Self, Self::Error> {
|
||||
match stmt {
|
||||
Statement::CreateTable(CreateTable {
|
||||
or_replace,
|
||||
temporary,
|
||||
external,
|
||||
global,
|
||||
if_not_exists,
|
||||
transient,
|
||||
volatile,
|
||||
iceberg,
|
||||
name,
|
||||
columns,
|
||||
constraints,
|
||||
hive_distribution,
|
||||
hive_formats,
|
||||
file_format,
|
||||
location,
|
||||
query,
|
||||
without_rowid,
|
||||
like,
|
||||
clone,
|
||||
comment,
|
||||
on_commit,
|
||||
on_cluster,
|
||||
primary_key,
|
||||
order_by,
|
||||
partition_by,
|
||||
cluster_by,
|
||||
clustered_by,
|
||||
inherits,
|
||||
strict,
|
||||
copy_grants,
|
||||
enable_schema_evolution,
|
||||
change_tracking,
|
||||
data_retention_time_in_days,
|
||||
max_data_extension_time_in_days,
|
||||
default_ddl_collation,
|
||||
with_aggregation_policy,
|
||||
with_row_access_policy,
|
||||
with_tags,
|
||||
base_location,
|
||||
external_volume,
|
||||
catalog,
|
||||
catalog_sync,
|
||||
storage_serialization_policy,
|
||||
table_options,
|
||||
}) => Ok(Self {
|
||||
or_replace,
|
||||
temporary,
|
||||
external,
|
||||
global,
|
||||
if_not_exists,
|
||||
transient,
|
||||
name,
|
||||
columns,
|
||||
constraints,
|
||||
hive_distribution,
|
||||
hive_formats,
|
||||
file_format,
|
||||
location,
|
||||
query,
|
||||
without_rowid,
|
||||
like,
|
||||
clone,
|
||||
comment,
|
||||
on_commit,
|
||||
on_cluster,
|
||||
primary_key,
|
||||
order_by,
|
||||
partition_by,
|
||||
cluster_by,
|
||||
clustered_by,
|
||||
inherits,
|
||||
strict,
|
||||
iceberg,
|
||||
copy_grants,
|
||||
enable_schema_evolution,
|
||||
change_tracking,
|
||||
data_retention_time_in_days,
|
||||
max_data_extension_time_in_days,
|
||||
default_ddl_collation,
|
||||
with_aggregation_policy,
|
||||
with_row_access_policy,
|
||||
with_tags,
|
||||
volatile,
|
||||
base_location,
|
||||
external_volume,
|
||||
catalog,
|
||||
catalog_sync,
|
||||
storage_serialization_policy,
|
||||
table_options,
|
||||
}),
|
||||
_ => Err(ParserError::ParserError(format!(
|
||||
"Expected create table statement, but received: {stmt}"
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper return type when parsing configuration for a `CREATE TABLE` statement.
|
||||
#[derive(Default)]
|
||||
pub(crate) struct CreateTableConfiguration {
|
||||
pub partition_by: Option<Box<Expr>>,
|
||||
pub cluster_by: Option<WrappedCollection<Vec<Expr>>>,
|
||||
pub inherits: Option<Vec<ObjectName>>,
|
||||
pub table_options: CreateTableOptions,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
|
||||
use crate::ast::{Ident, ObjectName, Statement};
|
||||
use crate::parser::ParserError;
|
||||
|
||||
#[test]
|
||||
pub fn test_from_valid_statement() {
|
||||
let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")]));
|
||||
|
||||
let stmt = builder.clone().build();
|
||||
|
||||
assert_eq!(builder, CreateTableBuilder::try_from(stmt).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_from_invalid_statement() {
|
||||
let stmt = Statement::Commit {
|
||||
chain: false,
|
||||
end: false,
|
||||
modifier: None,
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
CreateTableBuilder::try_from(stmt).unwrap_err(),
|
||||
ParserError::ParserError(
|
||||
"Expected create table statement, but received: COMMIT".to_owned()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,133 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! AST types specific to loading and unloading syntax, like one available in Snowflake which
|
||||
//! contains: STAGE ddl operations, PUT upload or COPY INTO
|
||||
//! See [this page](https://docs.snowflake.com/en/sql-reference/commands-data-loading) for more details.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::string::String;
|
||||
use core::fmt;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::ast::helpers::key_value_options::KeyValueOptions;
|
||||
use crate::ast::{Ident, ObjectName, SelectItem};
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct StageParamsObject {
|
||||
pub url: Option<String>,
|
||||
pub encryption: KeyValueOptions,
|
||||
pub endpoint: Option<String>,
|
||||
pub storage_integration: Option<String>,
|
||||
pub credentials: KeyValueOptions,
|
||||
}
|
||||
|
||||
/// This enum enables support for both standard SQL select item expressions
|
||||
/// and Snowflake-specific ones for data loading.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum StageLoadSelectItemKind {
|
||||
SelectItem(SelectItem),
|
||||
StageLoadSelectItem(StageLoadSelectItem),
|
||||
}
|
||||
|
||||
impl fmt::Display for StageLoadSelectItemKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match &self {
|
||||
StageLoadSelectItemKind::SelectItem(item) => write!(f, "{item}"),
|
||||
StageLoadSelectItemKind::StageLoadSelectItem(item) => write!(f, "{item}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct StageLoadSelectItem {
|
||||
pub alias: Option<Ident>,
|
||||
pub file_col_num: i32,
|
||||
pub element: Option<Ident>,
|
||||
pub item_as: Option<Ident>,
|
||||
}
|
||||
|
||||
impl fmt::Display for StageParamsObject {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let url = &self.url.as_ref();
|
||||
let storage_integration = &self.storage_integration.as_ref();
|
||||
let endpoint = &self.endpoint.as_ref();
|
||||
|
||||
if url.is_some() {
|
||||
write!(f, " URL='{}'", url.unwrap())?;
|
||||
}
|
||||
if storage_integration.is_some() {
|
||||
write!(f, " STORAGE_INTEGRATION={}", storage_integration.unwrap())?;
|
||||
}
|
||||
if endpoint.is_some() {
|
||||
write!(f, " ENDPOINT='{}'", endpoint.unwrap())?;
|
||||
}
|
||||
if !self.credentials.options.is_empty() {
|
||||
write!(f, " CREDENTIALS=({})", self.credentials)?;
|
||||
}
|
||||
if !self.encryption.options.is_empty() {
|
||||
write!(f, " ENCRYPTION=({})", self.encryption)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for StageLoadSelectItem {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.alias.is_some() {
|
||||
write!(f, "{}.", self.alias.as_ref().unwrap())?;
|
||||
}
|
||||
write!(f, "${}", self.file_col_num)?;
|
||||
if self.element.is_some() {
|
||||
write!(f, ":{}", self.element.as_ref().unwrap())?;
|
||||
}
|
||||
if self.item_as.is_some() {
|
||||
write!(f, " AS {}", self.item_as.as_ref().unwrap())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct FileStagingCommand {
|
||||
#[cfg_attr(feature = "visitor", visit(with = "visit_relation"))]
|
||||
pub stage: ObjectName,
|
||||
pub pattern: Option<String>,
|
||||
}
|
||||
|
||||
impl fmt::Display for FileStagingCommand {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.stage)?;
|
||||
if let Some(pattern) = self.pattern.as_ref() {
|
||||
write!(f, " PATTERN='{pattern}'")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
9292
src/ast/mod.rs
9292
src/ast/mod.rs
File diff suppressed because it is too large
Load diff
|
@ -1,43 +1,30 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use core::fmt;
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{string::String, vec::Vec};
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
use super::display_separated;
|
||||
|
||||
/// Unary operators
|
||||
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum UnaryOperator {
|
||||
/// Plus, e.g. `+9`
|
||||
Plus,
|
||||
/// Minus, e.g. `-9`
|
||||
Minus,
|
||||
/// Not, e.g. `NOT(true)`
|
||||
Not,
|
||||
/// Bitwise Not, e.g. `~9` (PostgreSQL-specific)
|
||||
PGBitwiseNot,
|
||||
|
@ -51,23 +38,6 @@ pub enum UnaryOperator {
|
|||
PGPrefixFactorial,
|
||||
/// Absolute value, e.g. `@ -9` (PostgreSQL-specific)
|
||||
PGAbs,
|
||||
/// Unary logical not operator: e.g. `! false` (Hive-specific)
|
||||
BangNot,
|
||||
/// `#` Number of points in path or polygon (PostgreSQL/Redshift geometric operator)
|
||||
/// see <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
Hash,
|
||||
/// `@-@` Length or circumference (PostgreSQL/Redshift geometric operator)
|
||||
/// see <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
AtDashAt,
|
||||
/// `@@` Center (PostgreSQL/Redshift geometric operator)
|
||||
/// see <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
DoubleAt,
|
||||
/// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator)
|
||||
/// see <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionDash,
|
||||
/// `?|` Is vertical? (PostgreSQL/Redshift geometric operator)
|
||||
/// see <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionPipe,
|
||||
}
|
||||
|
||||
impl fmt::Display for UnaryOperator {
|
||||
|
@ -82,253 +52,45 @@ impl fmt::Display for UnaryOperator {
|
|||
UnaryOperator::PGPostfixFactorial => "!",
|
||||
UnaryOperator::PGPrefixFactorial => "!!",
|
||||
UnaryOperator::PGAbs => "@",
|
||||
UnaryOperator::BangNot => "!",
|
||||
UnaryOperator::Hash => "#",
|
||||
UnaryOperator::AtDashAt => "@-@",
|
||||
UnaryOperator::DoubleAt => "@@",
|
||||
UnaryOperator::QuestionDash => "?-",
|
||||
UnaryOperator::QuestionPipe => "?|",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Binary operators
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum BinaryOperator {
|
||||
/// Plus, e.g. `a + b`
|
||||
Plus,
|
||||
/// Minus, e.g. `a - b`
|
||||
Minus,
|
||||
/// Multiply, e.g. `a * b`
|
||||
Multiply,
|
||||
/// Divide, e.g. `a / b`
|
||||
Divide,
|
||||
/// Modulo, e.g. `a % b`
|
||||
Modulo,
|
||||
/// String/Array Concat operator, e.g. `a || b`
|
||||
StringConcat,
|
||||
/// Greater than, e.g. `a > b`
|
||||
Gt,
|
||||
/// Less than, e.g. `a < b`
|
||||
Lt,
|
||||
/// Greater equal, e.g. `a >= b`
|
||||
GtEq,
|
||||
/// Less equal, e.g. `a <= b`
|
||||
LtEq,
|
||||
/// Spaceship, e.g. `a <=> b`
|
||||
Spaceship,
|
||||
/// Equal, e.g. `a = b`
|
||||
Eq,
|
||||
/// Not equal, e.g. `a <> b`
|
||||
NotEq,
|
||||
/// And, e.g. `a AND b`
|
||||
And,
|
||||
/// Or, e.g. `a OR b`
|
||||
Or,
|
||||
/// XOR, e.g. `a XOR b`
|
||||
Xor,
|
||||
/// Bitwise or, e.g. `a | b`
|
||||
BitwiseOr,
|
||||
/// Bitwise and, e.g. `a & b`
|
||||
BitwiseAnd,
|
||||
/// Bitwise XOR, e.g. `a ^ b`
|
||||
BitwiseXor,
|
||||
/// Integer division operator `//` in DuckDB
|
||||
DuckIntegerDivide,
|
||||
/// MySQL [`DIV`](https://dev.mysql.com/doc/refman/8.0/en/arithmetic-functions.html) integer division
|
||||
MyIntegerDivide,
|
||||
/// MATCH operator, e.g. `a MATCH b` (SQLite-specific)
|
||||
/// See <https://www.sqlite.org/lang_expr.html#the_like_glob_regexp_match_and_extract_operators>
|
||||
Match,
|
||||
/// REGEXP operator, e.g. `a REGEXP b` (SQLite-specific)
|
||||
Regexp,
|
||||
/// Support for custom operators (such as Postgres custom operators)
|
||||
Custom(String),
|
||||
/// Bitwise XOR, e.g. `a # b` (PostgreSQL-specific)
|
||||
PGBitwiseXor,
|
||||
/// Bitwise shift left, e.g. `a << b` (PostgreSQL-specific)
|
||||
PGBitwiseShiftLeft,
|
||||
/// Bitwise shift right, e.g. `a >> b` (PostgreSQL-specific)
|
||||
PGBitwiseShiftRight,
|
||||
/// Exponent, e.g. `a ^ b` (PostgreSQL-specific)
|
||||
PGExp,
|
||||
/// Overlap operator, e.g. `a && b` (PostgreSQL-specific)
|
||||
PGOverlap,
|
||||
/// String matches regular expression (case sensitively), e.g. `a ~ b` (PostgreSQL-specific)
|
||||
PGRegexMatch,
|
||||
/// String matches regular expression (case insensitively), e.g. `a ~* b` (PostgreSQL-specific)
|
||||
PGRegexIMatch,
|
||||
/// String does not match regular expression (case sensitively), e.g. `a !~ b` (PostgreSQL-specific)
|
||||
PGRegexNotMatch,
|
||||
/// String does not match regular expression (case insensitively), e.g. `a !~* b` (PostgreSQL-specific)
|
||||
PGRegexNotIMatch,
|
||||
/// String matches pattern (case sensitively), e.g. `a ~~ b` (PostgreSQL-specific)
|
||||
PGLikeMatch,
|
||||
/// String matches pattern (case insensitively), e.g. `a ~~* b` (PostgreSQL-specific)
|
||||
PGILikeMatch,
|
||||
/// String does not match pattern (case sensitively), e.g. `a !~~ b` (PostgreSQL-specific)
|
||||
PGNotLikeMatch,
|
||||
/// String does not match pattern (case insensitively), e.g. `a !~~* b` (PostgreSQL-specific)
|
||||
PGNotILikeMatch,
|
||||
/// String "starts with", eg: `a ^@ b` (PostgreSQL-specific)
|
||||
PGStartsWith,
|
||||
/// The `->` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator extracts a JSON object field or array
|
||||
/// element, for example `'{"a":"b"}'::json -> 'a'` or `[1, 2, 3]'::json
|
||||
/// -> 2`.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
Arrow,
|
||||
/// The `->>` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator extracts a JSON object field or JSON
|
||||
/// array element and converts it to text, for example `'{"a":"b"}'::json
|
||||
/// ->> 'a'` or `[1, 2, 3]'::json ->> 2`.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
LongArrow,
|
||||
/// The `#>` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator extracts a JSON sub-object at the specified
|
||||
/// path, for example:
|
||||
///
|
||||
/// ```notrust
|
||||
///'{"a": {"b": ["foo","bar"]}}'::json #> '{a,b,1}'
|
||||
/// ```
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
HashArrow,
|
||||
/// The `#>>` operator.
|
||||
///
|
||||
/// A PostgreSQL-specific operator that extracts JSON sub-object at the
|
||||
/// specified path, for example
|
||||
///
|
||||
/// ```notrust
|
||||
///'{"a": {"b": ["foo","bar"]}}'::json #>> '{a,b,1}'
|
||||
/// ```
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
HashLongArrow,
|
||||
/// The `@@` operator.
|
||||
///
|
||||
/// On PostgreSQL, this is used for JSON and text searches.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
/// See <https://www.postgresql.org/docs/current/functions-textsearch.html>.
|
||||
AtAt,
|
||||
/// The `@>` operator.
|
||||
///
|
||||
/// On PostgreSQL, this is used for JSON and text searches.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
/// See <https://www.postgresql.org/docs/current/functions-textsearch.html>.
|
||||
AtArrow,
|
||||
/// The `<@` operator.
|
||||
///
|
||||
/// On PostgreSQL, this is used for JSON and text searches.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
/// See <https://www.postgresql.org/docs/current/functions-textsearch.html>.
|
||||
ArrowAt,
|
||||
/// The `#-` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator is used to delete a field or array element
|
||||
/// at a specified path.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
HashMinus,
|
||||
/// The `@?` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator is used to check the given JSON path
|
||||
/// returns an item for the JSON value.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
AtQuestion,
|
||||
/// The `?` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator is used to check whether a string exists as a top-level key
|
||||
/// within the JSON value
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
Question,
|
||||
/// The `?&` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator is used to check whether all of the the indicated array
|
||||
/// members exist as top-level keys.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
QuestionAnd,
|
||||
/// The `?|` operator.
|
||||
///
|
||||
/// On PostgreSQL, this operator is used to check whether any of the the indicated array
|
||||
/// members exist as top-level keys.
|
||||
///
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>.
|
||||
QuestionPipe,
|
||||
/// PostgreSQL-specific custom operator.
|
||||
///
|
||||
/// See [CREATE OPERATOR](https://www.postgresql.org/docs/current/sql-createoperator.html)
|
||||
/// for more information.
|
||||
PGCustomBinaryOperator(Vec<String>),
|
||||
/// The `OVERLAPS` operator
|
||||
///
|
||||
/// Specifies a test for an overlap between two datetime periods:
|
||||
/// <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#overlaps-predicate>
|
||||
Overlaps,
|
||||
/// `##` Point of closest proximity (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
DoubleHash,
|
||||
/// `<->` Distance between (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
LtDashGt,
|
||||
/// `&<` Overlaps to left? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
AndLt,
|
||||
/// `&>` Overlaps to right? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
AndGt,
|
||||
/// `<<|` Is strictly below? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
LtLtPipe,
|
||||
/// `|>>` Is strictly above? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
PipeGtGt,
|
||||
/// `&<|` Does not extend above? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
AndLtPipe,
|
||||
/// `|&>` Does not extend below? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
PipeAndGt,
|
||||
/// `<^` Is below? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
LtCaret,
|
||||
/// `>^` Is above? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
GtCaret,
|
||||
/// `?#` Intersects? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionHash,
|
||||
/// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionDash,
|
||||
/// `?-|` Is perpendicular? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionDashPipe,
|
||||
/// `?||` Are Parallel? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
QuestionDoublePipe,
|
||||
/// `@` Contained or on? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
At,
|
||||
/// `~=` Same as? (PostgreSQL/Redshift geometric operator)
|
||||
/// See <https://www.postgresql.org/docs/9.5/functions-geometry.html>
|
||||
TildeEq,
|
||||
/// ':=' Assignment Operator
|
||||
/// See <https://dev.mysql.com/doc/refman/8.4/en/assignment-operators.html#operator_assign-value>
|
||||
Assignment,
|
||||
}
|
||||
|
||||
impl fmt::Display for BinaryOperator {
|
||||
|
@ -353,58 +115,16 @@ impl fmt::Display for BinaryOperator {
|
|||
BinaryOperator::BitwiseOr => f.write_str("|"),
|
||||
BinaryOperator::BitwiseAnd => f.write_str("&"),
|
||||
BinaryOperator::BitwiseXor => f.write_str("^"),
|
||||
BinaryOperator::DuckIntegerDivide => f.write_str("//"),
|
||||
BinaryOperator::MyIntegerDivide => f.write_str("DIV"),
|
||||
BinaryOperator::Match => f.write_str("MATCH"),
|
||||
BinaryOperator::Regexp => f.write_str("REGEXP"),
|
||||
BinaryOperator::Custom(s) => f.write_str(s),
|
||||
BinaryOperator::PGBitwiseXor => f.write_str("#"),
|
||||
BinaryOperator::PGBitwiseShiftLeft => f.write_str("<<"),
|
||||
BinaryOperator::PGBitwiseShiftRight => f.write_str(">>"),
|
||||
BinaryOperator::PGExp => f.write_str("^"),
|
||||
BinaryOperator::PGOverlap => f.write_str("&&"),
|
||||
BinaryOperator::PGRegexMatch => f.write_str("~"),
|
||||
BinaryOperator::PGRegexIMatch => f.write_str("~*"),
|
||||
BinaryOperator::PGRegexNotMatch => f.write_str("!~"),
|
||||
BinaryOperator::PGRegexNotIMatch => f.write_str("!~*"),
|
||||
BinaryOperator::PGLikeMatch => f.write_str("~~"),
|
||||
BinaryOperator::PGILikeMatch => f.write_str("~~*"),
|
||||
BinaryOperator::PGNotLikeMatch => f.write_str("!~~"),
|
||||
BinaryOperator::PGNotILikeMatch => f.write_str("!~~*"),
|
||||
BinaryOperator::PGStartsWith => f.write_str("^@"),
|
||||
BinaryOperator::Arrow => f.write_str("->"),
|
||||
BinaryOperator::LongArrow => f.write_str("->>"),
|
||||
BinaryOperator::HashArrow => f.write_str("#>"),
|
||||
BinaryOperator::HashLongArrow => f.write_str("#>>"),
|
||||
BinaryOperator::AtAt => f.write_str("@@"),
|
||||
BinaryOperator::AtArrow => f.write_str("@>"),
|
||||
BinaryOperator::ArrowAt => f.write_str("<@"),
|
||||
BinaryOperator::HashMinus => f.write_str("#-"),
|
||||
BinaryOperator::AtQuestion => f.write_str("@?"),
|
||||
BinaryOperator::Question => f.write_str("?"),
|
||||
BinaryOperator::QuestionAnd => f.write_str("?&"),
|
||||
BinaryOperator::QuestionPipe => f.write_str("?|"),
|
||||
BinaryOperator::PGCustomBinaryOperator(idents) => {
|
||||
write!(f, "OPERATOR({})", display_separated(idents, "."))
|
||||
}
|
||||
BinaryOperator::Overlaps => f.write_str("OVERLAPS"),
|
||||
BinaryOperator::DoubleHash => f.write_str("##"),
|
||||
BinaryOperator::LtDashGt => f.write_str("<->"),
|
||||
BinaryOperator::AndLt => f.write_str("&<"),
|
||||
BinaryOperator::AndGt => f.write_str("&>"),
|
||||
BinaryOperator::LtLtPipe => f.write_str("<<|"),
|
||||
BinaryOperator::PipeGtGt => f.write_str("|>>"),
|
||||
BinaryOperator::AndLtPipe => f.write_str("&<|"),
|
||||
BinaryOperator::PipeAndGt => f.write_str("|&>"),
|
||||
BinaryOperator::LtCaret => f.write_str("<^"),
|
||||
BinaryOperator::GtCaret => f.write_str(">^"),
|
||||
BinaryOperator::QuestionHash => f.write_str("?#"),
|
||||
BinaryOperator::QuestionDash => f.write_str("?-"),
|
||||
BinaryOperator::QuestionDashPipe => f.write_str("?-|"),
|
||||
BinaryOperator::QuestionDoublePipe => f.write_str("?||"),
|
||||
BinaryOperator::At => f.write_str("@"),
|
||||
BinaryOperator::TildeEq => f.write_str("~="),
|
||||
BinaryOperator::Assignment => f.write_str(":="),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
3260
src/ast/query.rs
3260
src/ast/query.rs
File diff suppressed because it is too large
Load diff
2506
src/ast/spans.rs
2506
src/ast/spans.rs
File diff suppressed because it is too large
Load diff
|
@ -1,165 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! SQL Abstract Syntax Tree (AST) for triggers.
|
||||
use super::*;
|
||||
|
||||
/// This specifies whether the trigger function should be fired once for every row affected by the trigger event, or just once per SQL statement.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TriggerObject {
|
||||
Row,
|
||||
Statement,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerObject {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TriggerObject::Row => write!(f, "ROW"),
|
||||
TriggerObject::Statement => write!(f, "STATEMENT"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This clause indicates whether the following relation name is for the before-image transition relation or the after-image transition relation
|
||||
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TriggerReferencingType {
|
||||
OldTable,
|
||||
NewTable,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerReferencingType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TriggerReferencingType::OldTable => write!(f, "OLD TABLE"),
|
||||
TriggerReferencingType::NewTable => write!(f, "NEW TABLE"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This keyword immediately precedes the declaration of one or two relation names that provide access to the transition relations of the triggering statement
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct TriggerReferencing {
|
||||
pub refer_type: TriggerReferencingType,
|
||||
pub is_as: bool,
|
||||
pub transition_relation_name: ObjectName,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerReferencing {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{refer_type}{is_as} {relation_name}",
|
||||
refer_type = self.refer_type,
|
||||
is_as = if self.is_as { " AS" } else { "" },
|
||||
relation_name = self.transition_relation_name
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Used to describe trigger events
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TriggerEvent {
|
||||
Insert,
|
||||
Update(Vec<Ident>),
|
||||
Delete,
|
||||
Truncate,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerEvent {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TriggerEvent::Insert => write!(f, "INSERT"),
|
||||
TriggerEvent::Update(columns) => {
|
||||
write!(f, "UPDATE")?;
|
||||
if !columns.is_empty() {
|
||||
write!(f, " OF")?;
|
||||
write!(f, " {}", display_comma_separated(columns))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
TriggerEvent::Delete => write!(f, "DELETE"),
|
||||
TriggerEvent::Truncate => write!(f, "TRUNCATE"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Trigger period
|
||||
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TriggerPeriod {
|
||||
For,
|
||||
After,
|
||||
Before,
|
||||
InsteadOf,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerPeriod {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TriggerPeriod::For => write!(f, "FOR"),
|
||||
TriggerPeriod::After => write!(f, "AFTER"),
|
||||
TriggerPeriod::Before => write!(f, "BEFORE"),
|
||||
TriggerPeriod::InsteadOf => write!(f, "INSTEAD OF"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Types of trigger body execution body.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TriggerExecBodyType {
|
||||
Function,
|
||||
Procedure,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerExecBodyType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TriggerExecBodyType::Function => write!(f, "FUNCTION"),
|
||||
TriggerExecBodyType::Procedure => write!(f, "PROCEDURE"),
|
||||
}
|
||||
}
|
||||
}
|
||||
/// This keyword immediately precedes the declaration of one or two relation names that provide access to the transition relations of the triggering statement
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct TriggerExecBody {
|
||||
pub exec_type: TriggerExecBodyType,
|
||||
pub func_desc: FunctionDesc,
|
||||
}
|
||||
|
||||
impl fmt::Display for TriggerExecBody {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{exec_type} {func_desc}",
|
||||
exec_type = self.exec_type,
|
||||
func_desc = self.func_desc
|
||||
)
|
||||
}
|
||||
}
|
498
src/ast/value.rs
498
src/ast/value.rs
|
@ -1,170 +1,40 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::boxed::Box;
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::string::String;
|
||||
|
||||
use core::fmt;
|
||||
|
||||
#[cfg(feature = "bigdecimal")]
|
||||
use bigdecimal::BigDecimal;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{ast::Ident, tokenizer::Span};
|
||||
#[cfg(feature = "visitor")]
|
||||
use sqlparser_derive::{Visit, VisitMut};
|
||||
|
||||
/// Wraps a primitive SQL [`Value`] with its [`Span`] location
|
||||
///
|
||||
/// # Example: create a `ValueWithSpan` from a `Value`
|
||||
/// ```
|
||||
/// # use sqlparser::ast::{Value, ValueWithSpan};
|
||||
/// # use sqlparser::tokenizer::{Location, Span};
|
||||
/// let value = Value::SingleQuotedString(String::from("endpoint"));
|
||||
/// // from line 1, column 1 to line 1, column 7
|
||||
/// let span = Span::new(Location::new(1, 1), Location::new(1, 7));
|
||||
/// let value_with_span = value.with_span(span);
|
||||
/// ```
|
||||
///
|
||||
/// # Example: create a `ValueWithSpan` from a `Value` with an empty span
|
||||
///
|
||||
/// You can call [`Value::with_empty_span`] to create a `ValueWithSpan` with an empty span
|
||||
/// ```
|
||||
/// # use sqlparser::ast::{Value, ValueWithSpan};
|
||||
/// # use sqlparser::tokenizer::{Location, Span};
|
||||
/// let value = Value::SingleQuotedString(String::from("endpoint"));
|
||||
/// let value_with_span = value.with_empty_span();
|
||||
/// assert_eq!(value_with_span.span, Span::empty());
|
||||
/// ```
|
||||
///
|
||||
/// You can also use the [`From`] trait to convert `ValueWithSpan` to/from `Value`s
|
||||
/// ```
|
||||
/// # use sqlparser::ast::{Value, ValueWithSpan};
|
||||
/// # use sqlparser::tokenizer::{Location, Span};
|
||||
/// let value = Value::SingleQuotedString(String::from("endpoint"));
|
||||
/// // converting `Value` to `ValueWithSpan` results in an empty span
|
||||
/// let value_with_span: ValueWithSpan = value.into();
|
||||
/// assert_eq!(value_with_span.span, Span::empty());
|
||||
/// // convert back to `Value`
|
||||
/// let value: Value = value_with_span.into();
|
||||
/// ```
|
||||
#[derive(Debug, Clone, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct ValueWithSpan {
|
||||
pub value: Value,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl PartialEq for ValueWithSpan {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.value == other.value
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for ValueWithSpan {
|
||||
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
|
||||
self.value.cmp(&other.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for ValueWithSpan {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
|
||||
Some(Ord::cmp(self, other))
|
||||
}
|
||||
}
|
||||
|
||||
impl core::hash::Hash for ValueWithSpan {
|
||||
fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
|
||||
self.value.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Value> for ValueWithSpan {
|
||||
fn from(value: Value) -> Self {
|
||||
value.with_empty_span()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ValueWithSpan> for Value {
|
||||
fn from(value: ValueWithSpan) -> Self {
|
||||
value.value
|
||||
}
|
||||
}
|
||||
|
||||
/// Primitive SQL values such as number and string
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(
|
||||
feature = "visitor",
|
||||
derive(Visit, VisitMut),
|
||||
visit(with = "visit_value")
|
||||
)]
|
||||
pub enum Value {
|
||||
/// Numeric literal
|
||||
#[cfg(not(feature = "bigdecimal"))]
|
||||
Number(String, bool),
|
||||
#[cfg(feature = "bigdecimal")]
|
||||
// HINT: use `test_utils::number` to make an instance of
|
||||
// Value::Number This might help if you your tests pass locally
|
||||
// but fail on CI with the `--all-features` flag enabled
|
||||
Number(BigDecimal, bool),
|
||||
/// 'string value'
|
||||
SingleQuotedString(String),
|
||||
// $<tag_name>$string value$<tag_name>$ (postgres syntax)
|
||||
DollarQuotedString(DollarQuotedString),
|
||||
/// Triple single quoted strings: Example '''abc'''
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleSingleQuotedString(String),
|
||||
/// Triple double quoted strings: Example """abc"""
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleDoubleQuotedString(String),
|
||||
/// e'string value' (postgres extension)
|
||||
/// See [Postgres docs](https://www.postgresql.org/docs/8.3/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS)
|
||||
/// for more details.
|
||||
/// <https://www.postgresql.org/docs/8.3/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS
|
||||
EscapedStringLiteral(String),
|
||||
/// u&'string value' (postgres extension)
|
||||
/// See [Postgres docs](https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS-UESCAPE)
|
||||
/// for more details.
|
||||
UnicodeStringLiteral(String),
|
||||
/// B'string value'
|
||||
SingleQuotedByteStringLiteral(String),
|
||||
/// B"string value"
|
||||
DoubleQuotedByteStringLiteral(String),
|
||||
/// Triple single quoted literal with byte string prefix. Example `B'''abc'''`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleSingleQuotedByteStringLiteral(String),
|
||||
/// Triple double quoted literal with byte string prefix. Example `B"""abc"""`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleDoubleQuotedByteStringLiteral(String),
|
||||
/// Single quoted literal with raw string prefix. Example `R'abc'`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
SingleQuotedRawStringLiteral(String),
|
||||
/// Double quoted literal with raw string prefix. Example `R"abc"`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
DoubleQuotedRawStringLiteral(String),
|
||||
/// Triple single quoted literal with raw string prefix. Example `R'''abc'''`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleSingleQuotedRawStringLiteral(String),
|
||||
/// Triple double quoted literal with raw string prefix. Example `R"""abc"""`
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
TripleDoubleQuotedRawStringLiteral(String),
|
||||
/// N'string value'
|
||||
NationalStringLiteral(String),
|
||||
/// X'hex value'
|
||||
|
@ -179,252 +49,77 @@ pub enum Value {
|
|||
Placeholder(String),
|
||||
}
|
||||
|
||||
impl ValueWithSpan {
|
||||
/// If the underlying literal is a string, regardless of quote style, returns the associated string value
|
||||
pub fn into_string(self) -> Option<String> {
|
||||
self.value.into_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl Value {
|
||||
/// If the underlying literal is a string, regardless of quote style, returns the associated string value
|
||||
pub fn into_string(self) -> Option<String> {
|
||||
match self {
|
||||
Value::SingleQuotedString(s)
|
||||
| Value::DoubleQuotedString(s)
|
||||
| Value::TripleSingleQuotedString(s)
|
||||
| Value::TripleDoubleQuotedString(s)
|
||||
| Value::SingleQuotedByteStringLiteral(s)
|
||||
| Value::DoubleQuotedByteStringLiteral(s)
|
||||
| Value::TripleSingleQuotedByteStringLiteral(s)
|
||||
| Value::TripleDoubleQuotedByteStringLiteral(s)
|
||||
| Value::SingleQuotedRawStringLiteral(s)
|
||||
| Value::DoubleQuotedRawStringLiteral(s)
|
||||
| Value::TripleSingleQuotedRawStringLiteral(s)
|
||||
| Value::TripleDoubleQuotedRawStringLiteral(s)
|
||||
| Value::EscapedStringLiteral(s)
|
||||
| Value::UnicodeStringLiteral(s)
|
||||
| Value::NationalStringLiteral(s)
|
||||
| Value::HexStringLiteral(s) => Some(s),
|
||||
Value::DollarQuotedString(s) => Some(s.value),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_span(self, span: Span) -> ValueWithSpan {
|
||||
ValueWithSpan { value: self, span }
|
||||
}
|
||||
|
||||
pub fn with_empty_span(self) -> ValueWithSpan {
|
||||
self.with_span(Span::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ValueWithSpan {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Value {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Value::Number(v, l) => write!(f, "{}{long}", v, long = if *l { "L" } else { "" }),
|
||||
Value::DoubleQuotedString(v) => write!(f, "\"{}\"", escape_double_quote_string(v)),
|
||||
Value::DoubleQuotedString(v) => write!(f, "\"{}\"", v),
|
||||
Value::SingleQuotedString(v) => write!(f, "'{}'", escape_single_quote_string(v)),
|
||||
Value::TripleSingleQuotedString(v) => {
|
||||
write!(f, "'''{v}'''")
|
||||
}
|
||||
Value::TripleDoubleQuotedString(v) => {
|
||||
write!(f, r#""""{v}""""#)
|
||||
}
|
||||
Value::DollarQuotedString(v) => write!(f, "{v}"),
|
||||
Value::EscapedStringLiteral(v) => write!(f, "E'{}'", escape_escaped_string(v)),
|
||||
Value::UnicodeStringLiteral(v) => write!(f, "U&'{}'", escape_unicode_string(v)),
|
||||
Value::NationalStringLiteral(v) => write!(f, "N'{v}'"),
|
||||
Value::HexStringLiteral(v) => write!(f, "X'{v}'"),
|
||||
Value::Boolean(v) => write!(f, "{v}"),
|
||||
Value::SingleQuotedByteStringLiteral(v) => write!(f, "B'{v}'"),
|
||||
Value::DoubleQuotedByteStringLiteral(v) => write!(f, "B\"{v}\""),
|
||||
Value::TripleSingleQuotedByteStringLiteral(v) => write!(f, "B'''{v}'''"),
|
||||
Value::TripleDoubleQuotedByteStringLiteral(v) => write!(f, r#"B"""{v}""""#),
|
||||
Value::SingleQuotedRawStringLiteral(v) => write!(f, "R'{v}'"),
|
||||
Value::DoubleQuotedRawStringLiteral(v) => write!(f, "R\"{v}\""),
|
||||
Value::TripleSingleQuotedRawStringLiteral(v) => write!(f, "R'''{v}'''"),
|
||||
Value::TripleDoubleQuotedRawStringLiteral(v) => write!(f, r#"R"""{v}""""#),
|
||||
Value::NationalStringLiteral(v) => write!(f, "N'{}'", v),
|
||||
Value::HexStringLiteral(v) => write!(f, "X'{}'", v),
|
||||
Value::Boolean(v) => write!(f, "{}", v),
|
||||
Value::Null => write!(f, "NULL"),
|
||||
Value::Placeholder(v) => write!(f, "{v}"),
|
||||
Value::Placeholder(v) => write!(f, "{}", v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub struct DollarQuotedString {
|
||||
pub value: String,
|
||||
pub tag: Option<String>,
|
||||
}
|
||||
|
||||
impl fmt::Display for DollarQuotedString {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match &self.tag {
|
||||
Some(tag) => {
|
||||
write!(f, "${}${}${}$", tag, self.value, tag)
|
||||
}
|
||||
None => {
|
||||
write!(f, "$${}$$", self.value)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum DateTimeField {
|
||||
Year,
|
||||
Years,
|
||||
Month,
|
||||
Months,
|
||||
/// Week optionally followed by a WEEKDAY.
|
||||
///
|
||||
/// ```sql
|
||||
/// WEEK(MONDAY)
|
||||
/// ```
|
||||
///
|
||||
/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#extract)
|
||||
Week(Option<Ident>),
|
||||
Weeks,
|
||||
Week,
|
||||
Day,
|
||||
DayOfWeek,
|
||||
DayOfYear,
|
||||
Days,
|
||||
Date,
|
||||
Datetime,
|
||||
Hour,
|
||||
Hours,
|
||||
Minute,
|
||||
Minutes,
|
||||
Second,
|
||||
Seconds,
|
||||
Century,
|
||||
Decade,
|
||||
Dow,
|
||||
Doy,
|
||||
Epoch,
|
||||
Isodow,
|
||||
IsoWeek,
|
||||
Isoyear,
|
||||
Julian,
|
||||
Microsecond,
|
||||
Microseconds,
|
||||
Millenium,
|
||||
Millennium,
|
||||
Millisecond,
|
||||
Milliseconds,
|
||||
Nanosecond,
|
||||
Nanoseconds,
|
||||
Quarter,
|
||||
Time,
|
||||
Timezone,
|
||||
TimezoneAbbr,
|
||||
TimezoneHour,
|
||||
TimezoneMinute,
|
||||
TimezoneRegion,
|
||||
NoDateTime,
|
||||
/// Arbitrary abbreviation or custom date-time part.
|
||||
///
|
||||
/// ```sql
|
||||
/// EXTRACT(q FROM CURRENT_TIMESTAMP)
|
||||
/// ```
|
||||
/// [Snowflake](https://docs.snowflake.com/en/sql-reference/functions-date-time#supported-date-and-time-parts)
|
||||
Custom(Ident),
|
||||
}
|
||||
|
||||
impl fmt::Display for DateTimeField {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
DateTimeField::Year => write!(f, "YEAR"),
|
||||
DateTimeField::Years => write!(f, "YEARS"),
|
||||
DateTimeField::Month => write!(f, "MONTH"),
|
||||
DateTimeField::Months => write!(f, "MONTHS"),
|
||||
DateTimeField::Week(week_day) => {
|
||||
write!(f, "WEEK")?;
|
||||
if let Some(week_day) = week_day {
|
||||
write!(f, "({week_day})")?
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
DateTimeField::Weeks => write!(f, "WEEKS"),
|
||||
DateTimeField::Day => write!(f, "DAY"),
|
||||
DateTimeField::DayOfWeek => write!(f, "DAYOFWEEK"),
|
||||
DateTimeField::DayOfYear => write!(f, "DAYOFYEAR"),
|
||||
DateTimeField::Days => write!(f, "DAYS"),
|
||||
DateTimeField::Date => write!(f, "DATE"),
|
||||
DateTimeField::Datetime => write!(f, "DATETIME"),
|
||||
DateTimeField::Hour => write!(f, "HOUR"),
|
||||
DateTimeField::Hours => write!(f, "HOURS"),
|
||||
DateTimeField::Minute => write!(f, "MINUTE"),
|
||||
DateTimeField::Minutes => write!(f, "MINUTES"),
|
||||
DateTimeField::Second => write!(f, "SECOND"),
|
||||
DateTimeField::Seconds => write!(f, "SECONDS"),
|
||||
DateTimeField::Century => write!(f, "CENTURY"),
|
||||
DateTimeField::Decade => write!(f, "DECADE"),
|
||||
DateTimeField::Dow => write!(f, "DOW"),
|
||||
DateTimeField::Doy => write!(f, "DOY"),
|
||||
DateTimeField::Epoch => write!(f, "EPOCH"),
|
||||
DateTimeField::Isodow => write!(f, "ISODOW"),
|
||||
DateTimeField::Isoyear => write!(f, "ISOYEAR"),
|
||||
DateTimeField::IsoWeek => write!(f, "ISOWEEK"),
|
||||
DateTimeField::Julian => write!(f, "JULIAN"),
|
||||
DateTimeField::Microsecond => write!(f, "MICROSECOND"),
|
||||
DateTimeField::Microseconds => write!(f, "MICROSECONDS"),
|
||||
DateTimeField::Millenium => write!(f, "MILLENIUM"),
|
||||
DateTimeField::Millennium => write!(f, "MILLENNIUM"),
|
||||
DateTimeField::Millisecond => write!(f, "MILLISECOND"),
|
||||
DateTimeField::Milliseconds => write!(f, "MILLISECONDS"),
|
||||
DateTimeField::Nanosecond => write!(f, "NANOSECOND"),
|
||||
DateTimeField::Nanoseconds => write!(f, "NANOSECONDS"),
|
||||
DateTimeField::Quarter => write!(f, "QUARTER"),
|
||||
DateTimeField::Time => write!(f, "TIME"),
|
||||
DateTimeField::Timezone => write!(f, "TIMEZONE"),
|
||||
DateTimeField::TimezoneAbbr => write!(f, "TIMEZONE_ABBR"),
|
||||
DateTimeField::TimezoneHour => write!(f, "TIMEZONE_HOUR"),
|
||||
DateTimeField::TimezoneMinute => write!(f, "TIMEZONE_MINUTE"),
|
||||
DateTimeField::TimezoneRegion => write!(f, "TIMEZONE_REGION"),
|
||||
DateTimeField::NoDateTime => write!(f, "NODATETIME"),
|
||||
DateTimeField::Custom(custom) => write!(f, "{custom}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
/// The Unicode Standard defines four normalization forms, which are intended to eliminate
|
||||
/// certain distinctions between visually or functionally identical characters.
|
||||
///
|
||||
/// See [Unicode Normalization Forms](https://unicode.org/reports/tr15/) for details.
|
||||
pub enum NormalizationForm {
|
||||
/// Canonical Decomposition, followed by Canonical Composition.
|
||||
NFC,
|
||||
/// Canonical Decomposition.
|
||||
NFD,
|
||||
/// Compatibility Decomposition, followed by Canonical Composition.
|
||||
NFKC,
|
||||
/// Compatibility Decomposition.
|
||||
NFKD,
|
||||
}
|
||||
|
||||
impl fmt::Display for NormalizationForm {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
NormalizationForm::NFC => write!(f, "NFC"),
|
||||
NormalizationForm::NFD => write!(f, "NFD"),
|
||||
NormalizationForm::NFKC => write!(f, "NFKC"),
|
||||
NormalizationForm::NFKD => write!(f, "NFKD"),
|
||||
}
|
||||
f.write_str(match self {
|
||||
DateTimeField::Year => "YEAR",
|
||||
DateTimeField::Month => "MONTH",
|
||||
DateTimeField::Week => "WEEK",
|
||||
DateTimeField::Day => "DAY",
|
||||
DateTimeField::Hour => "HOUR",
|
||||
DateTimeField::Minute => "MINUTE",
|
||||
DateTimeField::Second => "SECOND",
|
||||
DateTimeField::Century => "CENTURY",
|
||||
DateTimeField::Decade => "DECADE",
|
||||
DateTimeField::Dow => "DOW",
|
||||
DateTimeField::Doy => "DOY",
|
||||
DateTimeField::Epoch => "EPOCH",
|
||||
DateTimeField::Isodow => "ISODOW",
|
||||
DateTimeField::Isoyear => "ISOYEAR",
|
||||
DateTimeField::Julian => "JULIAN",
|
||||
DateTimeField::Microseconds => "MICROSECONDS",
|
||||
DateTimeField::Millenium => "MILLENIUM",
|
||||
DateTimeField::Millennium => "MILLENNIUM",
|
||||
DateTimeField::Milliseconds => "MILLISECONDS",
|
||||
DateTimeField::Quarter => "QUARTER",
|
||||
DateTimeField::Timezone => "TIMEZONE",
|
||||
DateTimeField::TimezoneHour => "TIMEZONE_HOUR",
|
||||
DateTimeField::TimezoneMinute => "TIMEZONE_MINUTE",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -433,60 +128,15 @@ pub struct EscapeQuotedString<'a> {
|
|||
quote: char,
|
||||
}
|
||||
|
||||
impl fmt::Display for EscapeQuotedString<'_> {
|
||||
impl<'a> fmt::Display for EscapeQuotedString<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
// EscapeQuotedString doesn't know which mode of escape was
|
||||
// chosen by the user. So this code must to correctly display
|
||||
// strings without knowing if the strings are already escaped
|
||||
// or not.
|
||||
//
|
||||
// If the quote symbol in the string is repeated twice, OR, if
|
||||
// the quote symbol is after backslash, display all the chars
|
||||
// without any escape. However, if the quote symbol is used
|
||||
// just between usual chars, `fmt()` should display it twice."
|
||||
//
|
||||
// The following table has examples
|
||||
//
|
||||
// | original query | mode | AST Node | serialized |
|
||||
// | ------------- | --------- | -------------------------------------------------- | ------------ |
|
||||
// | `"A""B""A"` | no-escape | `DoubleQuotedString(String::from("A\"\"B\"\"A"))` | `"A""B""A"` |
|
||||
// | `"A""B""A"` | default | `DoubleQuotedString(String::from("A\"B\"A"))` | `"A""B""A"` |
|
||||
// | `"A\"B\"A"` | no-escape | `DoubleQuotedString(String::from("A\\\"B\\\"A"))` | `"A\"B\"A"` |
|
||||
// | `"A\"B\"A"` | default | `DoubleQuotedString(String::from("A\"B\"A"))` | `"A""B""A"` |
|
||||
let quote = self.quote;
|
||||
let mut previous_char = char::default();
|
||||
let mut start_idx = 0;
|
||||
let mut peekable_chars = self.string.char_indices().peekable();
|
||||
while let Some(&(idx, ch)) = peekable_chars.peek() {
|
||||
match ch {
|
||||
char if char == quote => {
|
||||
if previous_char == '\\' {
|
||||
// the quote is already escaped with a backslash, skip
|
||||
peekable_chars.next();
|
||||
continue;
|
||||
}
|
||||
peekable_chars.next();
|
||||
match peekable_chars.peek() {
|
||||
Some((_, c)) if *c == quote => {
|
||||
// the quote is already escaped with another quote, skip
|
||||
peekable_chars.next();
|
||||
}
|
||||
_ => {
|
||||
// The quote is not escaped.
|
||||
// Including idx in the range, so the quote at idx will be printed twice:
|
||||
// in this call to write_str() and in the next one.
|
||||
f.write_str(&self.string[start_idx..=idx])?;
|
||||
start_idx = idx;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
peekable_chars.next();
|
||||
}
|
||||
for c in self.string.chars() {
|
||||
if c == self.quote {
|
||||
write!(f, "{q}{q}", q = self.quote)?;
|
||||
} else {
|
||||
write!(f, "{}", c)?;
|
||||
}
|
||||
previous_char = ch;
|
||||
}
|
||||
f.write_str(&self.string[start_idx..])?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
@ -499,13 +149,9 @@ pub fn escape_single_quote_string(s: &str) -> EscapeQuotedString<'_> {
|
|||
escape_quoted_string(s, '\'')
|
||||
}
|
||||
|
||||
pub fn escape_double_quote_string(s: &str) -> EscapeQuotedString<'_> {
|
||||
escape_quoted_string(s, '\"')
|
||||
}
|
||||
|
||||
pub struct EscapeEscapedStringLiteral<'a>(&'a str);
|
||||
|
||||
impl fmt::Display for EscapeEscapedStringLiteral<'_> {
|
||||
impl<'a> fmt::Display for EscapeEscapedStringLiteral<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for c in self.0.chars() {
|
||||
match c {
|
||||
|
@ -525,7 +171,7 @@ impl fmt::Display for EscapeEscapedStringLiteral<'_> {
|
|||
write!(f, r#"\r"#)?;
|
||||
}
|
||||
_ => {
|
||||
write!(f, "{c}")?;
|
||||
write!(f, "{}", c)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -537,44 +183,8 @@ pub fn escape_escaped_string(s: &str) -> EscapeEscapedStringLiteral<'_> {
|
|||
EscapeEscapedStringLiteral(s)
|
||||
}
|
||||
|
||||
pub struct EscapeUnicodeStringLiteral<'a>(&'a str);
|
||||
|
||||
impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
for c in self.0.chars() {
|
||||
match c {
|
||||
'\'' => {
|
||||
write!(f, "''")?;
|
||||
}
|
||||
'\\' => {
|
||||
write!(f, r#"\\"#)?;
|
||||
}
|
||||
x if x.is_ascii() => {
|
||||
write!(f, "{c}")?;
|
||||
}
|
||||
_ => {
|
||||
let codepoint = c as u32;
|
||||
// if the character fits in 32 bits, we can use the \XXXX format
|
||||
// otherwise, we need to use the \+XXXXXX format
|
||||
if codepoint <= 0xFFFF {
|
||||
write!(f, "\\{codepoint:04X}")?;
|
||||
} else {
|
||||
write!(f, "\\+{codepoint:06X}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn escape_unicode_string(s: &str) -> EscapeUnicodeStringLiteral<'_> {
|
||||
EscapeUnicodeStringLiteral(s)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
|
||||
pub enum TrimWhereField {
|
||||
Both,
|
||||
Leading,
|
||||
|
|
1010
src/ast/visitor.rs
1010
src/ast/visitor.rs
File diff suppressed because it is too large
Load diff
|
@ -1,36 +1,29 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
/// A [`Dialect`] for [ANSI SQL](https://en.wikipedia.org/wiki/SQL:2011).
|
||||
#[derive(Debug)]
|
||||
pub struct AnsiDialect {}
|
||||
|
||||
impl Dialect for AnsiDialect {
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase()
|
||||
('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch)
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch.is_ascii_digit() || ch == '_'
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '_'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,147 +1,35 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::ast::Statement;
|
||||
use crate::dialect::Dialect;
|
||||
use crate::keywords::Keyword;
|
||||
use crate::parser::{Parser, ParserError};
|
||||
|
||||
/// These keywords are disallowed as column identifiers. Such that
|
||||
/// `SELECT 5 AS <col> FROM T` is rejected by BigQuery.
|
||||
const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[
|
||||
Keyword::WITH,
|
||||
Keyword::SELECT,
|
||||
Keyword::WHERE,
|
||||
Keyword::GROUP,
|
||||
Keyword::HAVING,
|
||||
Keyword::ORDER,
|
||||
Keyword::LATERAL,
|
||||
Keyword::LIMIT,
|
||||
Keyword::FETCH,
|
||||
Keyword::UNION,
|
||||
Keyword::EXCEPT,
|
||||
Keyword::INTERSECT,
|
||||
Keyword::FROM,
|
||||
Keyword::INTO,
|
||||
Keyword::END,
|
||||
];
|
||||
|
||||
/// A [`Dialect`] for [Google Bigquery](https://cloud.google.com/bigquery/)
|
||||
#[derive(Debug, Default)]
|
||||
pub struct BigQueryDialect;
|
||||
|
||||
impl Dialect for BigQueryDialect {
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.parse_keyword(Keyword::BEGIN) {
|
||||
return Some(parser.parse_begin_exception_end());
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
|
||||
// See https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '`'
|
||||
}
|
||||
|
||||
fn supports_projection_trailing_commas(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement>
|
||||
fn supports_column_definition_trailing_commas(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
|
||||
// BigQuery supports `@@foo.bar` variable syntax in its procedural language.
|
||||
// https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend
|
||||
|| ch == '@'
|
||||
('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch) || ch == '_'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch.is_ascii_digit() || ch == '_'
|
||||
}
|
||||
|
||||
/// See [doc](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_literals)
|
||||
fn supports_triple_quoted_string(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See [doc](https://cloud.google.com/bigquery/docs/reference/standard-sql/navigation_functions#first_value)
|
||||
fn supports_window_function_null_treatment_arg(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#escape_sequences
|
||||
fn supports_string_literal_backslash_escape(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See [doc](https://cloud.google.com/bigquery/docs/reference/standard-sql/window-function-calls#ref_named_window)
|
||||
fn supports_window_clause_named_window_reference(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See [doc](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#set)
|
||||
fn supports_parenthesized_set_variables(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_except
|
||||
fn supports_select_wildcard_except(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
|
||||
fn supports_struct_literal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_expression_star>
|
||||
fn supports_select_expr_star(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#execute_immediate>
|
||||
fn supports_execute_immediate(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See <https://cloud.google.com/bigquery/docs/access-historical-data>
|
||||
fn supports_timestamp_versioning(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#group_by_clause>
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
|
||||
!RESERVED_FOR_COLUMN_ALIAS.contains(kw)
|
||||
}
|
||||
|
||||
fn supports_pipe_operator(&self) -> bool {
|
||||
true
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '-'
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,97 +1,27 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
// A [`Dialect`] for [ClickHouse](https://clickhouse.com/).
|
||||
#[derive(Debug)]
|
||||
pub struct ClickHouseDialect {}
|
||||
|
||||
impl Dialect for ClickHouseDialect {
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// See https://clickhouse.com/docs/en/sql-reference/syntax/#syntax-identifiers
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_'
|
||||
('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch) || ch == '_'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
self.is_identifier_start(ch) || ch.is_ascii_digit()
|
||||
}
|
||||
|
||||
fn supports_string_literal_backslash_escape(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_select_wildcard_except(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn describe_requires_table_keyword(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_limit_comma(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_insert_table_function(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_insert_format(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_numeric_literal_underscores(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// ClickHouse uses this for some FORMAT expressions in `INSERT` context, e.g. when inserting
|
||||
// with FORMAT JSONEachRow a raw JSON key-value expression is valid and expected.
|
||||
//
|
||||
// [ClickHouse formats](https://clickhouse.com/docs/en/interfaces/formats)
|
||||
fn supports_dictionary_syntax(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://clickhouse.com/docs/en/sql-reference/functions#higher-order-functions---operator-and-lambdaparams-expr-function>
|
||||
fn supports_lambda_functions(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_from_first_select(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://clickhouse.com/docs/en/sql-reference/statements/select/order-by>
|
||||
fn supports_order_by_all(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See <https://clickhouse.com/docs/en/sql-reference/aggregate-functions/grouping_function#grouping-sets>
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://clickhouse.com/docs/en/sql-reference/statements/select/group-by#rollup-modifier>
|
||||
fn supports_group_by_with_modifier(&self) -> bool {
|
||||
true
|
||||
self.is_identifier_start(ch) || ('0'..='9').contains(&ch)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,67 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
/// A [`Dialect`] for [Databricks SQL](https://www.databricks.com/)
|
||||
///
|
||||
/// See <https://docs.databricks.com/en/sql/language-manual/index.html>.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DatabricksDialect;
|
||||
|
||||
impl Dialect for DatabricksDialect {
|
||||
// see https://docs.databricks.com/en/sql/language-manual/sql-ref-identifiers.html
|
||||
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
matches!(ch, '`')
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
matches!(ch, 'a'..='z' | 'A'..='Z' | '_')
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
matches!(ch, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_')
|
||||
}
|
||||
|
||||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-qry-select-groupby.html
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_lambda_functions(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-qry-select.html#syntax
|
||||
fn supports_select_wildcard_except(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See https://docs.databricks.com/en/sql/language-manual/functions/struct.html
|
||||
fn supports_struct_literal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
|
@ -1,97 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
/// A [`Dialect`] for [DuckDB](https://duckdb.org/)
|
||||
#[derive(Debug, Default)]
|
||||
pub struct DuckDbDialect;
|
||||
|
||||
// In most cases the redshift dialect is identical to [`PostgresSqlDialect`].
|
||||
impl Dialect for DuckDbDialect {
|
||||
fn supports_trailing_commas(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic() || ch == '_'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic() || ch.is_ascii_digit() || ch == '$' || ch == '_'
|
||||
}
|
||||
|
||||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_eq_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_assignment_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// DuckDB uses this syntax for `STRUCT`s.
|
||||
//
|
||||
// https://duckdb.org/docs/sql/data_types/struct.html#creating-structs
|
||||
fn supports_dictionary_syntax(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// DuckDB uses this syntax for `MAP`s.
|
||||
//
|
||||
// https://duckdb.org/docs/sql/data_types/map.html#creating-maps
|
||||
fn support_map_literal_syntax(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://duckdb.org/docs/sql/functions/lambda.html>
|
||||
fn supports_lambda_functions(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// DuckDB is compatible with PostgreSQL syntax for this statement,
|
||||
// although not all features may be implemented.
|
||||
fn supports_explain_with_utility_options(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See DuckDB <https://duckdb.org/docs/sql/statements/load_and_install.html#load>
|
||||
fn supports_load_extension(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
// See DuckDB <https://duckdb.org/docs/sql/data_types/array.html#defining-an-array-field>
|
||||
fn supports_array_typedef_with_brackets(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_from_first_select(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See DuckDB <https://duckdb.org/docs/sql/query_syntax/orderby.html#order-by-all-examples>
|
||||
fn supports_order_by_all(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
|
@ -1,182 +1,36 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
/// A permissive, general purpose [`Dialect`], which parses a wide variety of SQL
|
||||
/// statements, from many different dialects.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct GenericDialect;
|
||||
|
||||
impl Dialect for GenericDialect {
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '"' || ch == '`'
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic() || ch == '_' || ch == '#' || ch == '@'
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '#'
|
||||
|| ch == '@'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic()
|
||||
|| ch.is_ascii_digit()
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '@'
|
||||
|| ch == '$'
|
||||
|| ch == '#'
|
||||
|| ch == '_'
|
||||
}
|
||||
|
||||
fn supports_unicode_string_literal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_group_by_with_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_left_associative_joins_without_parens(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_connect_by(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_match_recognize(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_start_transaction_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_window_function_null_treatment_arg(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_dictionary_syntax(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_window_clause_named_window_reference(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_parenthesized_set_variables(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_select_wildcard_except(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn support_map_literal_syntax(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn allow_extract_custom(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn allow_extract_single_quotes(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_create_index_with_clause(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_explain_with_utility_options(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_limit_comma(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_from_first_select(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_projection_trailing_commas(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_asc_desc_in_column_definition(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_try_convert(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_comment_on(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_load_extension(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_assignment_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_struct_literal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_empty_projections(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_nested_comments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_user_host_grantee(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_string_escape_constant(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_array_typedef_with_brackets(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_match_against(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_set_names(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_comma_separated_set_assignments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,23 +1,17 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
/// A [`Dialect`] for [Hive](https://hive.apache.org/).
|
||||
#[derive(Debug)]
|
||||
pub struct HiveDialect {}
|
||||
|
||||
|
@ -27,13 +21,16 @@ impl Dialect for HiveDialect {
|
|||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch.is_ascii_digit() || ch == '$'
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '$'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_ascii_lowercase()
|
||||
|| ch.is_ascii_uppercase()
|
||||
|| ch.is_ascii_digit()
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '$'
|
||||
|| ch == '{'
|
||||
|
@ -43,32 +40,4 @@ impl Dialect for HiveDialect {
|
|||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_numeric_prefix(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362061#Tutorial-BuiltInOperators>
|
||||
fn supports_bang_not_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362036#LanguageManualDML-Loadingfilesintotables>
|
||||
fn supports_load_data(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cwiki.apache.org/confluence/display/hive/languagemanual+sampling>
|
||||
fn supports_table_sample_before_alias(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=30151323#EnhancedAggregation,Cube,GroupingandRollup-CubesandRollupsr>
|
||||
fn supports_group_by_with_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
|
1213
src/dialect/mod.rs
1213
src/dialect/mod.rs
File diff suppressed because it is too large
Load diff
|
@ -1,35 +1,17 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::ast::helpers::attached_token::AttachedToken;
|
||||
use crate::ast::{
|
||||
BeginEndStatements, ConditionalStatementBlock, ConditionalStatements, GranteesType,
|
||||
IfStatement, Statement, TriggerObject,
|
||||
};
|
||||
use crate::dialect::Dialect;
|
||||
use crate::keywords::{self, Keyword};
|
||||
use crate::parser::{Parser, ParserError};
|
||||
use crate::tokenizer::Token;
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{vec, vec::Vec};
|
||||
|
||||
const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[Keyword::IF, Keyword::ELSE];
|
||||
|
||||
/// A [`Dialect`] for [Microsoft SQL Server](https://www.microsoft.com/en-us/sql-server/)
|
||||
#[derive(Debug)]
|
||||
pub struct MsSqlDialect {}
|
||||
|
||||
|
@ -40,259 +22,21 @@ impl Dialect for MsSqlDialect {
|
|||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// See https://docs.microsoft.com/en-us/sql/relational-databases/databases/database-identifiers?view=sql-server-2017#rules-for-regular-identifiers
|
||||
ch.is_alphabetic() || ch == '_' || ch == '#' || ch == '@'
|
||||
// We don't support non-latin "letters" currently.
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '#'
|
||||
|| ch == '@'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic()
|
||||
|| ch.is_ascii_digit()
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '@'
|
||||
|| ch == '$'
|
||||
|| ch == '#'
|
||||
|| ch == '_'
|
||||
}
|
||||
|
||||
fn identifier_quote_style(&self, _identifier: &str) -> Option<char> {
|
||||
Some('[')
|
||||
}
|
||||
|
||||
/// SQL Server has `CONVERT(type, value)` instead of `CONVERT(value, type)`
|
||||
/// <https://learn.microsoft.com/en-us/sql/t-sql/functions/cast-and-convert-transact-sql?view=sql-server-ver16>
|
||||
fn convert_type_before_value(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_outer_join_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_connect_by(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_eq_alias_assignment(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_try_convert(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// In MSSQL, there is no boolean type, and `true` and `false` are valid column names
|
||||
fn supports_boolean_literals(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_colon_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_expr_name(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_named_fn_args_with_rarrow_operator(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn supports_start_transaction_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_end_transaction_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See: <https://learn.microsoft.com/en-us/sql/t-sql/statements/set-statements-transact-sql>
|
||||
fn supports_set_stmt_without_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See: <https://learn.microsoft.com/en-us/sql/relational-databases/tables/querying-data-in-a-system-versioned-temporal-table>
|
||||
fn supports_timestamp_versioning(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://learn.microsoft.com/en-us/sql/t-sql/language-elements/slash-star-comment-transact-sql?view=sql-server-ver16>
|
||||
fn supports_nested_comments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://learn.microsoft.com/en-us/sql/t-sql/queries/from-transact-sql>
|
||||
fn supports_object_name_double_dot_notation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://learn.microsoft.com/en-us/sql/relational-databases/security/authentication-access/server-level-roles>
|
||||
fn get_reserved_grantees_types(&self) -> &[GranteesType] {
|
||||
&[GranteesType::Public]
|
||||
}
|
||||
|
||||
fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool {
|
||||
!keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw)
|
||||
}
|
||||
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.peek_keyword(Keyword::IF) {
|
||||
Some(self.parse_if_stmt(parser))
|
||||
} else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) {
|
||||
Some(self.parse_create_trigger(parser, false))
|
||||
} else if parser.parse_keywords(&[
|
||||
Keyword::CREATE,
|
||||
Keyword::OR,
|
||||
Keyword::ALTER,
|
||||
Keyword::TRIGGER,
|
||||
]) {
|
||||
Some(self.parse_create_trigger(parser, true))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MsSqlDialect {
|
||||
/// ```sql
|
||||
/// IF boolean_expression
|
||||
/// { sql_statement | statement_block }
|
||||
/// [ ELSE
|
||||
/// { sql_statement | statement_block } ]
|
||||
/// ```
|
||||
fn parse_if_stmt(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
let if_token = parser.expect_keyword(Keyword::IF)?;
|
||||
|
||||
let condition = parser.parse_expr()?;
|
||||
|
||||
let if_block = if parser.peek_keyword(Keyword::BEGIN) {
|
||||
let begin_token = parser.expect_keyword(Keyword::BEGIN)?;
|
||||
let statements = self.parse_statement_list(parser, Some(Keyword::END))?;
|
||||
let end_token = parser.expect_keyword(Keyword::END)?;
|
||||
ConditionalStatementBlock {
|
||||
start_token: AttachedToken(if_token),
|
||||
condition: Some(condition),
|
||||
then_token: None,
|
||||
conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements {
|
||||
begin_token: AttachedToken(begin_token),
|
||||
statements,
|
||||
end_token: AttachedToken(end_token),
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
let stmt = parser.parse_statement()?;
|
||||
ConditionalStatementBlock {
|
||||
start_token: AttachedToken(if_token),
|
||||
condition: Some(condition),
|
||||
then_token: None,
|
||||
conditional_statements: ConditionalStatements::Sequence {
|
||||
statements: vec![stmt],
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
let mut prior_statement_ended_with_semi_colon = false;
|
||||
while let Token::SemiColon = parser.peek_token_ref().token {
|
||||
parser.advance_token();
|
||||
prior_statement_ended_with_semi_colon = true;
|
||||
}
|
||||
|
||||
let mut else_block = None;
|
||||
if parser.peek_keyword(Keyword::ELSE) {
|
||||
let else_token = parser.expect_keyword(Keyword::ELSE)?;
|
||||
if parser.peek_keyword(Keyword::BEGIN) {
|
||||
let begin_token = parser.expect_keyword(Keyword::BEGIN)?;
|
||||
let statements = self.parse_statement_list(parser, Some(Keyword::END))?;
|
||||
let end_token = parser.expect_keyword(Keyword::END)?;
|
||||
else_block = Some(ConditionalStatementBlock {
|
||||
start_token: AttachedToken(else_token),
|
||||
condition: None,
|
||||
then_token: None,
|
||||
conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements {
|
||||
begin_token: AttachedToken(begin_token),
|
||||
statements,
|
||||
end_token: AttachedToken(end_token),
|
||||
}),
|
||||
});
|
||||
} else {
|
||||
let stmt = parser.parse_statement()?;
|
||||
else_block = Some(ConditionalStatementBlock {
|
||||
start_token: AttachedToken(else_token),
|
||||
condition: None,
|
||||
then_token: None,
|
||||
conditional_statements: ConditionalStatements::Sequence {
|
||||
statements: vec![stmt],
|
||||
},
|
||||
});
|
||||
}
|
||||
} else if prior_statement_ended_with_semi_colon {
|
||||
parser.prev_token();
|
||||
}
|
||||
|
||||
Ok(Statement::If(IfStatement {
|
||||
if_block,
|
||||
else_block,
|
||||
elseif_blocks: Vec::new(),
|
||||
end_token: None,
|
||||
}))
|
||||
}
|
||||
|
||||
/// Parse `CREATE TRIGGER` for [MsSql]
|
||||
///
|
||||
/// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql
|
||||
fn parse_create_trigger(
|
||||
&self,
|
||||
parser: &mut Parser,
|
||||
or_alter: bool,
|
||||
) -> Result<Statement, ParserError> {
|
||||
let name = parser.parse_object_name(false)?;
|
||||
parser.expect_keyword_is(Keyword::ON)?;
|
||||
let table_name = parser.parse_object_name(false)?;
|
||||
let period = parser.parse_trigger_period()?;
|
||||
let events = parser.parse_comma_separated(Parser::parse_trigger_event)?;
|
||||
|
||||
parser.expect_keyword_is(Keyword::AS)?;
|
||||
let statements = Some(parser.parse_conditional_statements(&[Keyword::END])?);
|
||||
|
||||
Ok(Statement::CreateTrigger {
|
||||
or_alter,
|
||||
or_replace: false,
|
||||
is_constraint: false,
|
||||
name,
|
||||
period,
|
||||
events,
|
||||
table_name,
|
||||
referenced_table_name: None,
|
||||
referencing: Vec::new(),
|
||||
trigger_object: TriggerObject::Statement,
|
||||
include_each: false,
|
||||
condition: None,
|
||||
exec_body: None,
|
||||
statements,
|
||||
characteristics: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse a sequence of statements, optionally separated by semicolon.
|
||||
///
|
||||
/// Stops parsing when reaching EOF or the given keyword.
|
||||
fn parse_statement_list(
|
||||
&self,
|
||||
parser: &mut Parser,
|
||||
terminal_keyword: Option<Keyword>,
|
||||
) -> Result<Vec<Statement>, ParserError> {
|
||||
let mut stmts = Vec::new();
|
||||
loop {
|
||||
if let Token::EOF = parser.peek_token_ref().token {
|
||||
break;
|
||||
}
|
||||
if let Some(term) = terminal_keyword {
|
||||
if parser.peek_keyword(term) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
stmts.push(parser.parse_statement()?);
|
||||
while let Token::SemiColon = parser.peek_token_ref().token {
|
||||
parser.advance_token();
|
||||
}
|
||||
}
|
||||
Ok(stmts)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,49 +1,27 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::boxed::Box;
|
||||
use crate::dialect::Dialect;
|
||||
|
||||
use crate::{
|
||||
ast::{BinaryOperator, Expr, LockTable, LockTableType, Statement},
|
||||
dialect::Dialect,
|
||||
keywords::Keyword,
|
||||
parser::{Parser, ParserError},
|
||||
};
|
||||
|
||||
use super::keywords;
|
||||
|
||||
const RESERVED_FOR_TABLE_ALIAS_MYSQL: &[Keyword] = &[
|
||||
Keyword::USE,
|
||||
Keyword::IGNORE,
|
||||
Keyword::FORCE,
|
||||
Keyword::STRAIGHT_JOIN,
|
||||
];
|
||||
|
||||
/// A [`Dialect`] for [MySQL](https://www.mysql.com/)
|
||||
#[derive(Debug)]
|
||||
pub struct MySqlDialect {}
|
||||
|
||||
impl Dialect for MySqlDialect {
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// See https://dev.mysql.com/doc/refman/8.0/en/identifiers.html.
|
||||
// Identifiers which begin with a digit are recognized while tokenizing numbers,
|
||||
// so they can be distinguished from exponent numeric literals.
|
||||
ch.is_alphabetic()
|
||||
// We don't yet support identifiers beginning with numbers, as that
|
||||
// makes it hard to distinguish numeric literals.
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '$'
|
||||
|| ch == '@'
|
||||
|
@ -51,149 +29,10 @@ impl Dialect for MySqlDialect {
|
|||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
self.is_identifier_start(ch) || ch.is_ascii_digit()
|
||||
self.is_identifier_start(ch) || ('0'..='9').contains(&ch)
|
||||
}
|
||||
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '`'
|
||||
}
|
||||
|
||||
fn identifier_quote_style(&self, _identifier: &str) -> Option<char> {
|
||||
Some('`')
|
||||
}
|
||||
|
||||
// See https://dev.mysql.com/doc/refman/8.0/en/string-literals.html#character-escape-sequences
|
||||
fn supports_string_literal_backslash_escape(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn ignores_wildcard_escapes(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_numeric_prefix(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn parse_infix(
|
||||
&self,
|
||||
parser: &mut crate::parser::Parser,
|
||||
expr: &crate::ast::Expr,
|
||||
_precedence: u8,
|
||||
) -> Option<Result<crate::ast::Expr, ParserError>> {
|
||||
// Parse DIV as an operator
|
||||
if parser.parse_keyword(Keyword::DIV) {
|
||||
Some(Ok(Expr::BinaryOp {
|
||||
left: Box::new(expr.clone()),
|
||||
op: BinaryOperator::MyIntegerDivide,
|
||||
right: Box::new(parser.parse_expr().unwrap()),
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.parse_keywords(&[Keyword::LOCK, Keyword::TABLES]) {
|
||||
Some(parse_lock_tables(parser))
|
||||
} else if parser.parse_keywords(&[Keyword::UNLOCK, Keyword::TABLES]) {
|
||||
Some(parse_unlock_tables(parser))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn require_interval_qualifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_limit_comma(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See: <https://dev.mysql.com/doc/refman/8.4/en/create-table-select.html>
|
||||
fn supports_create_table_select(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See: <https://dev.mysql.com/doc/refman/8.4/en/insert.html>
|
||||
fn supports_insert_set(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_user_host_grantee(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
|
||||
explicit
|
||||
|| (!keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw)
|
||||
&& !RESERVED_FOR_TABLE_ALIAS_MYSQL.contains(kw))
|
||||
}
|
||||
|
||||
fn supports_table_hints(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn requires_single_line_comment_whitespace(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_match_against(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_set_names(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_comma_separated_set_assignments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
/// `LOCK TABLES`
|
||||
/// <https://dev.mysql.com/doc/refman/8.0/en/lock-tables.html>
|
||||
fn parse_lock_tables(parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
let tables = parser.parse_comma_separated(parse_lock_table)?;
|
||||
Ok(Statement::LockTables { tables })
|
||||
}
|
||||
|
||||
// tbl_name [[AS] alias] lock_type
|
||||
fn parse_lock_table(parser: &mut Parser) -> Result<LockTable, ParserError> {
|
||||
let table = parser.parse_identifier()?;
|
||||
let alias =
|
||||
parser.parse_optional_alias(&[Keyword::READ, Keyword::WRITE, Keyword::LOW_PRIORITY])?;
|
||||
let lock_type = parse_lock_tables_type(parser)?;
|
||||
|
||||
Ok(LockTable {
|
||||
table,
|
||||
alias,
|
||||
lock_type,
|
||||
})
|
||||
}
|
||||
|
||||
// READ [LOCAL] | [LOW_PRIORITY] WRITE
|
||||
fn parse_lock_tables_type(parser: &mut Parser) -> Result<LockTableType, ParserError> {
|
||||
if parser.parse_keyword(Keyword::READ) {
|
||||
if parser.parse_keyword(Keyword::LOCAL) {
|
||||
Ok(LockTableType::Read { local: true })
|
||||
} else {
|
||||
Ok(LockTableType::Read { local: false })
|
||||
}
|
||||
} else if parser.parse_keyword(Keyword::WRITE) {
|
||||
Ok(LockTableType::Write {
|
||||
low_priority: false,
|
||||
})
|
||||
} else if parser.parse_keywords(&[Keyword::LOW_PRIORITY, Keyword::WRITE]) {
|
||||
Ok(LockTableType::Write { low_priority: true })
|
||||
} else {
|
||||
parser.expected("an lock type in LOCK TABLES", parser.peek_token())
|
||||
}
|
||||
}
|
||||
|
||||
/// UNLOCK TABLES
|
||||
/// <https://dev.mysql.com/doc/refman/8.0/en/lock-tables.html>
|
||||
fn parse_unlock_tables(_parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
Ok(Statement::UnlockTables)
|
||||
}
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
|
@ -26,240 +9,70 @@
|
|||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
use log::debug;
|
||||
|
||||
use crate::dialect::{Dialect, Precedence};
|
||||
use crate::ast::{CommentObject, Statement};
|
||||
use crate::dialect::Dialect;
|
||||
use crate::keywords::Keyword;
|
||||
use crate::parser::{Parser, ParserError};
|
||||
use crate::tokenizer::Token;
|
||||
|
||||
/// A [`Dialect`] for [PostgreSQL](https://www.postgresql.org/)
|
||||
#[derive(Debug)]
|
||||
pub struct PostgreSqlDialect {}
|
||||
|
||||
const PERIOD_PREC: u8 = 200;
|
||||
const DOUBLE_COLON_PREC: u8 = 140;
|
||||
const BRACKET_PREC: u8 = 130;
|
||||
const COLLATE_PREC: u8 = 120;
|
||||
const AT_TZ_PREC: u8 = 110;
|
||||
const CARET_PREC: u8 = 100;
|
||||
const MUL_DIV_MOD_OP_PREC: u8 = 90;
|
||||
const PLUS_MINUS_PREC: u8 = 80;
|
||||
// there's no XOR operator in PostgreSQL, but support it here to avoid breaking tests
|
||||
const XOR_PREC: u8 = 75;
|
||||
const PG_OTHER_PREC: u8 = 70;
|
||||
const BETWEEN_LIKE_PREC: u8 = 60;
|
||||
const EQ_PREC: u8 = 50;
|
||||
const IS_PREC: u8 = 40;
|
||||
const NOT_PREC: u8 = 30;
|
||||
const AND_PREC: u8 = 20;
|
||||
const OR_PREC: u8 = 10;
|
||||
|
||||
impl Dialect for PostgreSqlDialect {
|
||||
fn identifier_quote_style(&self, _identifier: &str) -> Option<char> {
|
||||
Some('"')
|
||||
}
|
||||
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '"' // Postgres does not support backticks to quote identifiers
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// See https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
|
||||
// We don't yet support identifiers beginning with "letters with
|
||||
// diacritical marks"
|
||||
ch.is_alphabetic() || ch == '_'
|
||||
// diacritical marks and non-Latin letters"
|
||||
('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch) || ch == '_'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
ch.is_alphabetic() || ch.is_ascii_digit() || ch == '$' || ch == '_'
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ('0'..='9').contains(&ch)
|
||||
|| ch == '$'
|
||||
|| ch == '_'
|
||||
}
|
||||
|
||||
fn supports_unicode_string_literal(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://www.postgresql.org/docs/current/sql-createoperator.html>
|
||||
fn is_custom_operator_part(&self, ch: char) -> bool {
|
||||
matches!(
|
||||
ch,
|
||||
'+' | '-'
|
||||
| '*'
|
||||
| '/'
|
||||
| '<'
|
||||
| '>'
|
||||
| '='
|
||||
| '~'
|
||||
| '!'
|
||||
| '@'
|
||||
| '#'
|
||||
| '%'
|
||||
| '^'
|
||||
| '&'
|
||||
| '|'
|
||||
| '`'
|
||||
| '?'
|
||||
)
|
||||
}
|
||||
|
||||
fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
|
||||
let token = parser.peek_token();
|
||||
debug!("get_next_precedence() {token:?}");
|
||||
|
||||
// we only return some custom value here when the behaviour (not merely the numeric value) differs
|
||||
// from the default implementation
|
||||
match token.token {
|
||||
Token::Word(w) if w.keyword == Keyword::COLLATE => Some(Ok(COLLATE_PREC)),
|
||||
Token::LBracket => Some(Ok(BRACKET_PREC)),
|
||||
Token::Arrow
|
||||
| Token::LongArrow
|
||||
| Token::HashArrow
|
||||
| Token::HashLongArrow
|
||||
| Token::AtArrow
|
||||
| Token::ArrowAt
|
||||
| Token::HashMinus
|
||||
| Token::AtQuestion
|
||||
| Token::AtAt
|
||||
| Token::Question
|
||||
| Token::QuestionAnd
|
||||
| Token::QuestionPipe
|
||||
| Token::ExclamationMark
|
||||
| Token::Overlap
|
||||
| Token::CaretAt
|
||||
| Token::StringConcat
|
||||
| Token::Sharp
|
||||
| Token::ShiftRight
|
||||
| Token::ShiftLeft
|
||||
| Token::CustomBinaryOperator(_) => Some(Ok(PG_OTHER_PREC)),
|
||||
_ => None,
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
if parser.parse_keyword(Keyword::COMMENT) {
|
||||
Some(parse_comment(parser))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_group_by_expr(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn prec_value(&self, prec: Precedence) -> u8 {
|
||||
match prec {
|
||||
Precedence::Period => PERIOD_PREC,
|
||||
Precedence::DoubleColon => DOUBLE_COLON_PREC,
|
||||
Precedence::AtTz => AT_TZ_PREC,
|
||||
Precedence::MulDivModOp => MUL_DIV_MOD_OP_PREC,
|
||||
Precedence::PlusMinus => PLUS_MINUS_PREC,
|
||||
Precedence::Xor => XOR_PREC,
|
||||
Precedence::Ampersand => PG_OTHER_PREC,
|
||||
Precedence::Caret => CARET_PREC,
|
||||
Precedence::Pipe => PG_OTHER_PREC,
|
||||
Precedence::Between => BETWEEN_LIKE_PREC,
|
||||
Precedence::Eq => EQ_PREC,
|
||||
Precedence::Like => BETWEEN_LIKE_PREC,
|
||||
Precedence::Is => IS_PREC,
|
||||
Precedence::PgOther => PG_OTHER_PREC,
|
||||
Precedence::UnaryNot => NOT_PREC,
|
||||
Precedence::And => AND_PREC,
|
||||
Precedence::Or => OR_PREC,
|
||||
}
|
||||
}
|
||||
|
||||
fn allow_extract_custom(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn allow_extract_single_quotes(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_create_index_with_clause(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// see <https://www.postgresql.org/docs/current/sql-explain.html>
|
||||
fn supports_explain_with_utility_options(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// see <https://www.postgresql.org/docs/current/sql-listen.html>
|
||||
/// see <https://www.postgresql.org/docs/current/sql-unlisten.html>
|
||||
/// see <https://www.postgresql.org/docs/current/sql-notify.html>
|
||||
fn supports_listen_notify(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// see <https://www.postgresql.org/docs/13/functions-math.html>
|
||||
fn supports_factorial_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// see <https://www.postgresql.org/docs/current/sql-comment.html>
|
||||
fn supports_comment_on(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://www.postgresql.org/docs/current/sql-load.html>
|
||||
fn supports_load_extension(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>
|
||||
///
|
||||
/// Required to support the colon in:
|
||||
/// ```sql
|
||||
/// SELECT json_object('a': 'b')
|
||||
/// ```
|
||||
fn supports_named_fn_args_with_colon_operator(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See <https://www.postgresql.org/docs/current/functions-json.html>
|
||||
///
|
||||
/// Required to support the label in:
|
||||
/// ```sql
|
||||
/// SELECT json_object('label': 'value')
|
||||
/// ```
|
||||
fn supports_named_fn_args_with_expr_name(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Return true if the dialect supports empty projections in SELECT statements
|
||||
///
|
||||
/// Example
|
||||
/// ```sql
|
||||
/// SELECT from table_name
|
||||
/// ```
|
||||
fn supports_empty_projections(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_nested_comments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_string_escape_constant(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_numeric_literal_underscores(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// See: <https://www.postgresql.org/docs/current/arrays.html#ARRAYS-DECLARATION>
|
||||
fn supports_array_typedef_with_brackets(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_geometric_types(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_set_names(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_alter_column_type_using(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_comment(parser: &mut Parser) -> Result<Statement, ParserError> {
|
||||
parser.expect_keyword(Keyword::ON)?;
|
||||
let token = parser.next_token();
|
||||
|
||||
let (object_type, object_name) = match token {
|
||||
Token::Word(w) if w.keyword == Keyword::COLUMN => {
|
||||
let object_name = parser.parse_object_name()?;
|
||||
(CommentObject::Column, object_name)
|
||||
}
|
||||
Token::Word(w) if w.keyword == Keyword::TABLE => {
|
||||
let object_name = parser.parse_object_name()?;
|
||||
(CommentObject::Table, object_name)
|
||||
}
|
||||
_ => parser.expected("comment object_type", token)?,
|
||||
};
|
||||
|
||||
parser.expect_keyword(Keyword::IS)?;
|
||||
let comment = if parser.parse_keyword(Keyword::NULL) {
|
||||
None
|
||||
} else {
|
||||
Some(parser.parse_literal_string()?)
|
||||
};
|
||||
Ok(Statement::Comment {
|
||||
object_type,
|
||||
object_name,
|
||||
comment,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,19 +1,14 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
use crate::dialect::Dialect;
|
||||
use core::iter::Peekable;
|
||||
|
@ -21,7 +16,6 @@ use core::str::Chars;
|
|||
|
||||
use super::PostgreSqlDialect;
|
||||
|
||||
/// A [`Dialect`] for [RedShift](https://aws.amazon.com/redshift/)
|
||||
#[derive(Debug)]
|
||||
pub struct RedshiftSqlDialect {}
|
||||
|
||||
|
@ -32,103 +26,30 @@ pub struct RedshiftSqlDialect {}
|
|||
// in the Postgres dialect, the query will be parsed as an array, while in the Redshift dialect it will
|
||||
// be a json path
|
||||
impl Dialect for RedshiftSqlDialect {
|
||||
/// Determine if a character starts a potential nested quoted identifier.
|
||||
/// Example: RedShift supports the following quote styles to all mean the same thing:
|
||||
/// ```sql
|
||||
/// SELECT 1 AS foo;
|
||||
/// SELECT 1 AS "foo";
|
||||
/// SELECT 1 AS [foo];
|
||||
/// SELECT 1 AS ["foo"];
|
||||
/// ```
|
||||
fn is_nested_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '['
|
||||
fn is_delimited_identifier_start(&self, ch: char) -> bool {
|
||||
ch == '"' || ch == '['
|
||||
}
|
||||
|
||||
/// Only applicable whenever [`Self::is_nested_delimited_identifier_start`] returns true
|
||||
/// If the next sequence of tokens potentially represent a nested identifier, then this method
|
||||
/// returns a tuple containing the outer quote style, and if present, the inner (nested) quote style.
|
||||
///
|
||||
/// Example (Redshift):
|
||||
/// ```text
|
||||
/// `["foo"]` => Some(`[`, Some(`"`))
|
||||
/// `[foo]` => Some(`[`, None)
|
||||
/// `[0]` => None
|
||||
/// `"foo"` => None
|
||||
/// ```
|
||||
fn peek_nested_delimited_identifier_quotes(
|
||||
&self,
|
||||
mut chars: Peekable<Chars<'_>>,
|
||||
) -> Option<(char, Option<char>)> {
|
||||
if chars.peek() != Some(&'[') {
|
||||
return None;
|
||||
}
|
||||
|
||||
/// Determine if quoted characters are proper for identifier
|
||||
/// It's needed to distinguish treating square brackets as quotes from
|
||||
/// treating them as json path. If there is identifier then we assume
|
||||
/// there is no json path.
|
||||
fn is_proper_identifier_inside_quotes(&self, mut chars: Peekable<Chars<'_>>) -> bool {
|
||||
chars.next();
|
||||
|
||||
let mut not_white_chars = chars.skip_while(|ch| ch.is_whitespace()).peekable();
|
||||
|
||||
if let Some(&ch) = not_white_chars.peek() {
|
||||
if ch == '"' {
|
||||
return Some(('[', Some('"')));
|
||||
}
|
||||
if self.is_identifier_start(ch) {
|
||||
return Some(('[', None));
|
||||
}
|
||||
return self.is_identifier_start(ch);
|
||||
}
|
||||
|
||||
None
|
||||
false
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
|
||||
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
|
||||
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii()
|
||||
// Extends Postgres dialect with sharp
|
||||
PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
// Extends Postgres dialect with sharp and UTF-8 multibyte chars
|
||||
// https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
|
||||
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii()
|
||||
}
|
||||
|
||||
/// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)`
|
||||
/// <https://docs.aws.amazon.com/redshift/latest/dg/r_CONVERT_function.html>
|
||||
fn convert_type_before_value(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_connect_by(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Redshift expects the `TOP` option before the `ALL/DISTINCT` option:
|
||||
/// <https://docs.aws.amazon.com/redshift/latest/dg/r_SELECT_list.html#r_SELECT_list-parameters>
|
||||
fn supports_top_before_distinct(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Redshift supports PartiQL: <https://docs.aws.amazon.com/redshift/latest/dg/super-overview.html>
|
||||
fn supports_partiql(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_string_escape_constant(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_geometric_types(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_array_typedef_with_brackets(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn allow_extract_single_quotes(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_string_literal_backslash_escape(&self) -> bool {
|
||||
true
|
||||
// Extends Postgres dialect with sharp
|
||||
PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,35 +1,20 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::boxed::Box;
|
||||
|
||||
use crate::ast::BinaryOperator;
|
||||
use crate::ast::{Expr, Statement};
|
||||
use crate::ast::Statement;
|
||||
use crate::dialect::Dialect;
|
||||
use crate::keywords::Keyword;
|
||||
use crate::parser::{Parser, ParserError};
|
||||
|
||||
/// A [`Dialect`] for [SQLite](https://www.sqlite.org)
|
||||
///
|
||||
/// This dialect allows columns in a
|
||||
/// [`CREATE TABLE`](https://sqlite.org/lang_createtable.html) statement with no
|
||||
/// type specified, as in `CREATE TABLE t1 (a)`. In the AST, these columns will
|
||||
/// have the data type [`Unspecified`](crate::ast::DataType::Unspecified).
|
||||
#[derive(Debug)]
|
||||
pub struct SQLiteDialect {}
|
||||
|
||||
|
@ -41,28 +26,17 @@ impl Dialect for SQLiteDialect {
|
|||
ch == '`' || ch == '"' || ch == '['
|
||||
}
|
||||
|
||||
fn identifier_quote_style(&self, _identifier: &str) -> Option<char> {
|
||||
Some('`')
|
||||
}
|
||||
|
||||
fn is_identifier_start(&self, ch: char) -> bool {
|
||||
// See https://www.sqlite.org/draft/tokenreq.html
|
||||
ch.is_ascii_lowercase()
|
||||
|| ch.is_ascii_uppercase()
|
||||
('a'..='z').contains(&ch)
|
||||
|| ('A'..='Z').contains(&ch)
|
||||
|| ch == '_'
|
||||
|| ch == '$'
|
||||
|| ('\u{007f}'..='\u{ffff}').contains(&ch)
|
||||
}
|
||||
|
||||
fn supports_filter_during_aggregation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_start_transaction_modifier(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn is_identifier_part(&self, ch: char) -> bool {
|
||||
self.is_identifier_start(ch) || ch.is_ascii_digit()
|
||||
self.is_identifier_start(ch) || ('0'..='9').contains(&ch)
|
||||
}
|
||||
|
||||
fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
|
||||
|
@ -73,41 +47,4 @@ impl Dialect for SQLiteDialect {
|
|||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_infix(
|
||||
&self,
|
||||
parser: &mut crate::parser::Parser,
|
||||
expr: &crate::ast::Expr,
|
||||
_precedence: u8,
|
||||
) -> Option<Result<crate::ast::Expr, ParserError>> {
|
||||
// Parse MATCH and REGEXP as operators
|
||||
// See <https://www.sqlite.org/lang_expr.html#the_like_glob_regexp_match_and_extract_operators>
|
||||
for (keyword, op) in [
|
||||
(Keyword::REGEXP, BinaryOperator::Regexp),
|
||||
(Keyword::MATCH, BinaryOperator::Match),
|
||||
] {
|
||||
if parser.parse_keyword(keyword) {
|
||||
let left = Box::new(expr.clone());
|
||||
let right = Box::new(parser.parse_expr().unwrap());
|
||||
return Some(Ok(Expr::BinaryOp { left, op, right }));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn supports_in_empty_list(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_limit_comma(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_asc_desc_in_column_definition(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn supports_dollar_placeholder(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,135 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
//! Utilities for formatting SQL AST nodes with pretty printing support.
|
||||
//!
|
||||
//! The module provides formatters that implement the `Display` trait with support
|
||||
//! for both regular (`{}`) and pretty (`{:#}`) formatting modes. Pretty printing
|
||||
//! adds proper indentation and line breaks to make SQL statements more readable.
|
||||
|
||||
use core::fmt::{self, Display, Write};
|
||||
|
||||
/// A wrapper around a value that adds an indent to the value when displayed with {:#}.
|
||||
pub(crate) struct Indent<T>(pub T);
|
||||
|
||||
const INDENT: &str = " ";
|
||||
|
||||
impl<T> Display for Indent<T>
|
||||
where
|
||||
T: Display,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
f.write_str(INDENT)?;
|
||||
write!(Indent(f), "{:#}", self.0)
|
||||
} else {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds an indent to the inner writer
|
||||
impl<T> Write for Indent<T>
|
||||
where
|
||||
T: Write,
|
||||
{
|
||||
fn write_str(&mut self, s: &str) -> fmt::Result {
|
||||
self.0.write_str(s)?;
|
||||
// Our NewLine and SpaceOrNewline utils always print individual newlines as a single-character string.
|
||||
if s == "\n" {
|
||||
self.0.write_str(INDENT)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A value that inserts a newline when displayed with {:#}, but not when displayed with {}.
|
||||
pub(crate) struct NewLine;
|
||||
|
||||
impl Display for NewLine {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
f.write_char('\n')
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A value that inserts a space when displayed with {}, but a newline when displayed with {:#}.
|
||||
pub(crate) struct SpaceOrNewline;
|
||||
|
||||
impl Display for SpaceOrNewline {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if f.alternate() {
|
||||
f.write_char('\n')
|
||||
} else {
|
||||
f.write_char(' ')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A value that displays a comma-separated list of values.
|
||||
/// When pretty-printed (using {:#}), it displays each value on a new line.
|
||||
pub(crate) struct DisplayCommaSeparated<'a, T: fmt::Display>(pub(crate) &'a [T]);
|
||||
|
||||
impl<T: fmt::Display> fmt::Display for DisplayCommaSeparated<'_, T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut first = true;
|
||||
for t in self.0 {
|
||||
if !first {
|
||||
f.write_char(',')?;
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
}
|
||||
first = false;
|
||||
t.fmt(f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Displays a whitespace, followed by a comma-separated list that is indented when pretty-printed.
|
||||
pub(crate) fn indented_list<T: fmt::Display>(f: &mut fmt::Formatter, items: &[T]) -> fmt::Result {
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
Indent(DisplayCommaSeparated(items)).fmt(f)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_indent() {
|
||||
struct TwoLines;
|
||||
|
||||
impl Display for TwoLines {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("line 1")?;
|
||||
SpaceOrNewline.fmt(f)?;
|
||||
f.write_str("line 2")
|
||||
}
|
||||
}
|
||||
|
||||
let indent = Indent(TwoLines);
|
||||
assert_eq!(
|
||||
indent.to_string(),
|
||||
TwoLines.to_string(),
|
||||
"Only the alternate form should be indented"
|
||||
);
|
||||
assert_eq!(format!("{:#}", indent), " line 1\n line 2");
|
||||
}
|
||||
}
|
529
src/keywords.rs
529
src/keywords.rs
File diff suppressed because it is too large
Load diff
148
src/lib.rs
148
src/lib.rs
|
@ -1,35 +1,21 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//! # SQL Parser for Rust
|
||||
//! SQL Parser for Rust
|
||||
//!
|
||||
//! Example code:
|
||||
//!
|
||||
//! This crate provides an ANSI:SQL 2011 lexer and parser that can parse SQL
|
||||
//! into an Abstract Syntax Tree ([`AST`]). See the [sqlparser crates.io page]
|
||||
//! for more information.
|
||||
//!
|
||||
//! For more information:
|
||||
//! 1. [`Parser::parse_sql`] and [`Parser::new`] for the Parsing API
|
||||
//! 2. [`ast`] for the AST structure
|
||||
//! 3. [`Dialect`] for supported SQL dialects
|
||||
//! 4. [`Spanned`] for source text locations (see "Source Spans" below for details)
|
||||
//!
|
||||
//! [`Spanned`]: ast::Spanned
|
||||
//!
|
||||
//! # Example parsing SQL text
|
||||
//! into an Abstract Syntax Tree (AST).
|
||||
//!
|
||||
//! ```
|
||||
//! use sqlparser::dialect::GenericDialect;
|
||||
|
@ -46,116 +32,9 @@
|
|||
//!
|
||||
//! println!("AST: {:?}", ast);
|
||||
//! ```
|
||||
//!
|
||||
//! # Creating SQL text from AST
|
||||
//!
|
||||
//! This crate allows users to recover the original SQL text (with comments
|
||||
//! removed, normalized whitespace and identifier capitalization), which is
|
||||
//! useful for tools that analyze and manipulate SQL.
|
||||
//!
|
||||
//! ```
|
||||
//! # use sqlparser::dialect::GenericDialect;
|
||||
//! # use sqlparser::parser::Parser;
|
||||
//! let sql = "SELECT a FROM table_1";
|
||||
//!
|
||||
//! // parse to a Vec<Statement>
|
||||
//! let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
|
||||
//!
|
||||
//! // The original SQL text can be generated from the AST
|
||||
//! assert_eq!(ast[0].to_string(), sql);
|
||||
//! ```
|
||||
//!
|
||||
//! # Pretty Printing
|
||||
//!
|
||||
//! SQL statements can be pretty-printed with proper indentation and line breaks using the alternate flag (`{:#}`):
|
||||
//!
|
||||
//! ```
|
||||
//! # use sqlparser::dialect::GenericDialect;
|
||||
//! # use sqlparser::parser::Parser;
|
||||
//! let sql = "SELECT a, b FROM table_1";
|
||||
//! let ast = Parser::parse_sql(&GenericDialect, sql).unwrap();
|
||||
//!
|
||||
//! // Pretty print with indentation and line breaks
|
||||
//! let pretty_sql = format!("{:#}", ast[0]);
|
||||
//! assert_eq!(pretty_sql, r#"
|
||||
//! SELECT
|
||||
//! a,
|
||||
//! b
|
||||
//! FROM
|
||||
//! table_1
|
||||
//! "#.trim());
|
||||
//! ```
|
||||
//! [sqlparser crates.io page]: https://crates.io/crates/sqlparser
|
||||
//! [`Parser::parse_sql`]: crate::parser::Parser::parse_sql
|
||||
//! [`Parser::new`]: crate::parser::Parser::new
|
||||
//! [`AST`]: crate::ast
|
||||
//! [`ast`]: crate::ast
|
||||
//! [`Dialect`]: crate::dialect::Dialect
|
||||
//!
|
||||
//! # Source Spans
|
||||
//!
|
||||
//! Starting with version `0.53.0` sqlparser introduced source spans to the
|
||||
//! AST. This feature provides source information for syntax errors, enabling
|
||||
//! better error messages. See [issue #1548] for more information and the
|
||||
//! [`Spanned`] trait to access the spans.
|
||||
//!
|
||||
//! [issue #1548]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548
|
||||
//! [`Spanned`]: ast::Spanned
|
||||
//!
|
||||
//! ## Migration Guide
|
||||
//!
|
||||
//! For the next few releases, we will be incrementally adding source spans to the
|
||||
//! AST nodes, trying to minimize the impact on existing users. Some breaking
|
||||
//! changes are inevitable, and the following is a summary of the changes:
|
||||
//!
|
||||
//! #### New fields for spans (must be added to any existing pattern matches)
|
||||
//!
|
||||
//! The primary change is that new fields will be added to AST nodes to store the source `Span` or `TokenWithLocation`.
|
||||
//!
|
||||
//! This will require
|
||||
//! 1. Adding new fields to existing pattern matches.
|
||||
//! 2. Filling in the proper span information when constructing AST nodes.
|
||||
//!
|
||||
//! For example, since `Ident` now stores a `Span`, to construct an `Ident` you
|
||||
//! must provide now provide one:
|
||||
//!
|
||||
//! Previously:
|
||||
//! ```text
|
||||
//! # use sqlparser::ast::Ident;
|
||||
//! Ident {
|
||||
//! value: "name".into(),
|
||||
//! quote_style: None,
|
||||
//! }
|
||||
//! ```
|
||||
//! Now
|
||||
//! ```rust
|
||||
//! # use sqlparser::ast::Ident;
|
||||
//! # use sqlparser::tokenizer::Span;
|
||||
//! Ident {
|
||||
//! value: "name".into(),
|
||||
//! quote_style: None,
|
||||
//! span: Span::empty(),
|
||||
//! };
|
||||
//! ```
|
||||
//!
|
||||
//! Similarly, when pattern matching on `Ident`, you must now account for the
|
||||
//! `span` field.
|
||||
//!
|
||||
//! #### Misc.
|
||||
//! - [`TokenWithLocation`] stores a full `Span`, rather than just a source location.
|
||||
//! Users relying on `token.location` should use `token.location.start` instead.
|
||||
//!
|
||||
//![`TokenWithLocation`]: tokenizer::TokenWithLocation
|
||||
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
#![allow(clippy::upper_case_acronyms)]
|
||||
// Permit large enum variants to keep a unified, expressive AST.
|
||||
// Splitting complex nodes (expressions, statements, types) into separate types
|
||||
// would bloat the API and hide intent. Extra memory is a worthwhile tradeoff.
|
||||
#![allow(clippy::large_enum_variant)]
|
||||
|
||||
// Allow proc-macros to find this crate
|
||||
extern crate self as sqlparser;
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
extern crate alloc;
|
||||
|
@ -167,7 +46,6 @@ extern crate pretty_assertions;
|
|||
pub mod ast;
|
||||
#[macro_use]
|
||||
pub mod dialect;
|
||||
mod display_utils;
|
||||
pub mod keywords;
|
||||
pub mod parser;
|
||||
pub mod tokenizer;
|
||||
|
|
5271
src/parser.rs
Normal file
5271
src/parser.rs
Normal file
File diff suppressed because it is too large
Load diff
|
@ -1,308 +0,0 @@
|
|||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//! SQL Parser for ALTER
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::vec;
|
||||
|
||||
use super::{Parser, ParserError};
|
||||
use crate::{
|
||||
ast::{
|
||||
AlterConnectorOwner, AlterPolicyOperation, AlterRoleOperation, Expr, Password, ResetConfig,
|
||||
RoleOption, SetConfigValue, Statement,
|
||||
},
|
||||
dialect::{MsSqlDialect, PostgreSqlDialect},
|
||||
keywords::Keyword,
|
||||
tokenizer::Token,
|
||||
};
|
||||
|
||||
impl Parser<'_> {
|
||||
pub fn parse_alter_role(&mut self) -> Result<Statement, ParserError> {
|
||||
if dialect_of!(self is PostgreSqlDialect) {
|
||||
return self.parse_pg_alter_role();
|
||||
} else if dialect_of!(self is MsSqlDialect) {
|
||||
return self.parse_mssql_alter_role();
|
||||
}
|
||||
|
||||
Err(ParserError::ParserError(
|
||||
"ALTER ROLE is only support for PostgreSqlDialect, MsSqlDialect".into(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Parse ALTER POLICY statement
|
||||
/// ```sql
|
||||
/// ALTER POLICY policy_name ON table_name [ RENAME TO new_name ]
|
||||
/// or
|
||||
/// ALTER POLICY policy_name ON table_name
|
||||
/// [ TO { role_name | PUBLIC | CURRENT_ROLE | CURRENT_USER | SESSION_USER } [, ...] ]
|
||||
/// [ USING ( using_expression ) ]
|
||||
/// [ WITH CHECK ( check_expression ) ]
|
||||
/// ```
|
||||
///
|
||||
/// [PostgreSQL](https://www.postgresql.org/docs/current/sql-alterpolicy.html)
|
||||
pub fn parse_alter_policy(&mut self) -> Result<Statement, ParserError> {
|
||||
let name = self.parse_identifier()?;
|
||||
self.expect_keyword_is(Keyword::ON)?;
|
||||
let table_name = self.parse_object_name(false)?;
|
||||
|
||||
if self.parse_keyword(Keyword::RENAME) {
|
||||
self.expect_keyword_is(Keyword::TO)?;
|
||||
let new_name = self.parse_identifier()?;
|
||||
Ok(Statement::AlterPolicy {
|
||||
name,
|
||||
table_name,
|
||||
operation: AlterPolicyOperation::Rename { new_name },
|
||||
})
|
||||
} else {
|
||||
let to = if self.parse_keyword(Keyword::TO) {
|
||||
Some(self.parse_comma_separated(|p| p.parse_owner())?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let using = if self.parse_keyword(Keyword::USING) {
|
||||
self.expect_token(&Token::LParen)?;
|
||||
let expr = self.parse_expr()?;
|
||||
self.expect_token(&Token::RParen)?;
|
||||
Some(expr)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
|
||||
self.expect_token(&Token::LParen)?;
|
||||
let expr = self.parse_expr()?;
|
||||
self.expect_token(&Token::RParen)?;
|
||||
Some(expr)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(Statement::AlterPolicy {
|
||||
name,
|
||||
table_name,
|
||||
operation: AlterPolicyOperation::Apply {
|
||||
to,
|
||||
using,
|
||||
with_check,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse an `ALTER CONNECTOR` statement
|
||||
/// ```sql
|
||||
/// ALTER CONNECTOR connector_name SET DCPROPERTIES(property_name=property_value, ...);
|
||||
///
|
||||
/// ALTER CONNECTOR connector_name SET URL new_url;
|
||||
///
|
||||
/// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role;
|
||||
/// ```
|
||||
pub fn parse_alter_connector(&mut self) -> Result<Statement, ParserError> {
|
||||
let name = self.parse_identifier()?;
|
||||
self.expect_keyword_is(Keyword::SET)?;
|
||||
|
||||
let properties = match self.parse_options_with_keywords(&[Keyword::DCPROPERTIES])? {
|
||||
properties if !properties.is_empty() => Some(properties),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let url = if self.parse_keyword(Keyword::URL) {
|
||||
Some(self.parse_literal_string()?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let owner = if self.parse_keywords(&[Keyword::OWNER, Keyword::USER]) {
|
||||
let owner = self.parse_identifier()?;
|
||||
Some(AlterConnectorOwner::User(owner))
|
||||
} else if self.parse_keywords(&[Keyword::OWNER, Keyword::ROLE]) {
|
||||
let owner = self.parse_identifier()?;
|
||||
Some(AlterConnectorOwner::Role(owner))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Statement::AlterConnector {
|
||||
name,
|
||||
properties,
|
||||
url,
|
||||
owner,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_mssql_alter_role(&mut self) -> Result<Statement, ParserError> {
|
||||
let role_name = self.parse_identifier()?;
|
||||
|
||||
let operation = if self.parse_keywords(&[Keyword::ADD, Keyword::MEMBER]) {
|
||||
let member_name = self.parse_identifier()?;
|
||||
AlterRoleOperation::AddMember { member_name }
|
||||
} else if self.parse_keywords(&[Keyword::DROP, Keyword::MEMBER]) {
|
||||
let member_name = self.parse_identifier()?;
|
||||
AlterRoleOperation::DropMember { member_name }
|
||||
} else if self.parse_keywords(&[Keyword::WITH, Keyword::NAME]) {
|
||||
if self.consume_token(&Token::Eq) {
|
||||
let role_name = self.parse_identifier()?;
|
||||
AlterRoleOperation::RenameRole { role_name }
|
||||
} else {
|
||||
return self.expected("= after WITH NAME ", self.peek_token());
|
||||
}
|
||||
} else {
|
||||
return self.expected("'ADD' or 'DROP' or 'WITH NAME'", self.peek_token());
|
||||
};
|
||||
|
||||
Ok(Statement::AlterRole {
|
||||
name: role_name,
|
||||
operation,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_pg_alter_role(&mut self) -> Result<Statement, ParserError> {
|
||||
let role_name = self.parse_identifier()?;
|
||||
|
||||
// [ IN DATABASE _`database_name`_ ]
|
||||
let in_database = if self.parse_keywords(&[Keyword::IN, Keyword::DATABASE]) {
|
||||
self.parse_object_name(false).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let operation = if self.parse_keyword(Keyword::RENAME) {
|
||||
if self.parse_keyword(Keyword::TO) {
|
||||
let role_name = self.parse_identifier()?;
|
||||
AlterRoleOperation::RenameRole { role_name }
|
||||
} else {
|
||||
return self.expected("TO after RENAME", self.peek_token());
|
||||
}
|
||||
// SET
|
||||
} else if self.parse_keyword(Keyword::SET) {
|
||||
let config_name = self.parse_object_name(false)?;
|
||||
// FROM CURRENT
|
||||
if self.parse_keywords(&[Keyword::FROM, Keyword::CURRENT]) {
|
||||
AlterRoleOperation::Set {
|
||||
config_name,
|
||||
config_value: SetConfigValue::FromCurrent,
|
||||
in_database,
|
||||
}
|
||||
// { TO | = } { value | DEFAULT }
|
||||
} else if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
|
||||
if self.parse_keyword(Keyword::DEFAULT) {
|
||||
AlterRoleOperation::Set {
|
||||
config_name,
|
||||
config_value: SetConfigValue::Default,
|
||||
in_database,
|
||||
}
|
||||
} else if let Ok(expr) = self.parse_expr() {
|
||||
AlterRoleOperation::Set {
|
||||
config_name,
|
||||
config_value: SetConfigValue::Value(expr),
|
||||
in_database,
|
||||
}
|
||||
} else {
|
||||
self.expected("config value", self.peek_token())?
|
||||
}
|
||||
} else {
|
||||
self.expected("'TO' or '=' or 'FROM CURRENT'", self.peek_token())?
|
||||
}
|
||||
// RESET
|
||||
} else if self.parse_keyword(Keyword::RESET) {
|
||||
if self.parse_keyword(Keyword::ALL) {
|
||||
AlterRoleOperation::Reset {
|
||||
config_name: ResetConfig::ALL,
|
||||
in_database,
|
||||
}
|
||||
} else {
|
||||
let config_name = self.parse_object_name(false)?;
|
||||
AlterRoleOperation::Reset {
|
||||
config_name: ResetConfig::ConfigName(config_name),
|
||||
in_database,
|
||||
}
|
||||
}
|
||||
// option
|
||||
} else {
|
||||
// [ WITH ]
|
||||
let _ = self.parse_keyword(Keyword::WITH);
|
||||
// option
|
||||
let mut options = vec![];
|
||||
while let Some(opt) = self.maybe_parse(|parser| parser.parse_pg_role_option())? {
|
||||
options.push(opt);
|
||||
}
|
||||
// check option
|
||||
if options.is_empty() {
|
||||
return self.expected("option", self.peek_token())?;
|
||||
}
|
||||
|
||||
AlterRoleOperation::WithOptions { options }
|
||||
};
|
||||
|
||||
Ok(Statement::AlterRole {
|
||||
name: role_name,
|
||||
operation,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_pg_role_option(&mut self) -> Result<RoleOption, ParserError> {
|
||||
let option = match self.parse_one_of_keywords(&[
|
||||
Keyword::BYPASSRLS,
|
||||
Keyword::NOBYPASSRLS,
|
||||
Keyword::CONNECTION,
|
||||
Keyword::CREATEDB,
|
||||
Keyword::NOCREATEDB,
|
||||
Keyword::CREATEROLE,
|
||||
Keyword::NOCREATEROLE,
|
||||
Keyword::INHERIT,
|
||||
Keyword::NOINHERIT,
|
||||
Keyword::LOGIN,
|
||||
Keyword::NOLOGIN,
|
||||
Keyword::PASSWORD,
|
||||
Keyword::REPLICATION,
|
||||
Keyword::NOREPLICATION,
|
||||
Keyword::SUPERUSER,
|
||||
Keyword::NOSUPERUSER,
|
||||
Keyword::VALID,
|
||||
]) {
|
||||
Some(Keyword::BYPASSRLS) => RoleOption::BypassRLS(true),
|
||||
Some(Keyword::NOBYPASSRLS) => RoleOption::BypassRLS(false),
|
||||
Some(Keyword::CONNECTION) => {
|
||||
self.expect_keyword_is(Keyword::LIMIT)?;
|
||||
RoleOption::ConnectionLimit(Expr::Value(self.parse_number_value()?))
|
||||
}
|
||||
Some(Keyword::CREATEDB) => RoleOption::CreateDB(true),
|
||||
Some(Keyword::NOCREATEDB) => RoleOption::CreateDB(false),
|
||||
Some(Keyword::CREATEROLE) => RoleOption::CreateRole(true),
|
||||
Some(Keyword::NOCREATEROLE) => RoleOption::CreateRole(false),
|
||||
Some(Keyword::INHERIT) => RoleOption::Inherit(true),
|
||||
Some(Keyword::NOINHERIT) => RoleOption::Inherit(false),
|
||||
Some(Keyword::LOGIN) => RoleOption::Login(true),
|
||||
Some(Keyword::NOLOGIN) => RoleOption::Login(false),
|
||||
Some(Keyword::PASSWORD) => {
|
||||
let password = if self.parse_keyword(Keyword::NULL) {
|
||||
Password::NullPassword
|
||||
} else {
|
||||
Password::Password(Expr::Value(self.parse_value()?))
|
||||
};
|
||||
RoleOption::Password(password)
|
||||
}
|
||||
Some(Keyword::REPLICATION) => RoleOption::Replication(true),
|
||||
Some(Keyword::NOREPLICATION) => RoleOption::Replication(false),
|
||||
Some(Keyword::SUPERUSER) => RoleOption::SuperUser(true),
|
||||
Some(Keyword::NOSUPERUSER) => RoleOption::SuperUser(false),
|
||||
Some(Keyword::VALID) => {
|
||||
self.expect_keyword_is(Keyword::UNTIL)?;
|
||||
RoleOption::ValidUntil(Expr::Value(self.parse_value()?))
|
||||
}
|
||||
_ => self.expected("option", self.peek_token())?,
|
||||
};
|
||||
|
||||
Ok(option)
|
||||
}
|
||||
}
|
17174
src/parser/mod.rs
17174
src/parser/mod.rs
File diff suppressed because it is too large
Load diff
|
@ -1,26 +1,21 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
/// This module contains internal utilities used for testing the library.
|
||||
/// While technically public, the library's users are not supposed to rely
|
||||
/// on this module, as it will change without notice.
|
||||
//
|
||||
// Integration tests (i.e. everything under `tests/`) import this
|
||||
// via `tests/test_utils/helpers`.
|
||||
// via `tests/test_utils/mod.rs`.
|
||||
|
||||
#[cfg(not(feature = "std"))]
|
||||
use alloc::{
|
||||
|
@ -31,62 +26,18 @@ use alloc::{
|
|||
};
|
||||
use core::fmt::Debug;
|
||||
|
||||
use crate::ast::*;
|
||||
use crate::dialect::*;
|
||||
use crate::parser::{Parser, ParserError};
|
||||
use crate::tokenizer::{Token, Tokenizer};
|
||||
use crate::{ast::*, parser::ParserOptions};
|
||||
|
||||
#[cfg(test)]
|
||||
use pretty_assertions::assert_eq;
|
||||
use crate::tokenizer::Tokenizer;
|
||||
|
||||
/// Tests use the methods on this struct to invoke the parser on one or
|
||||
/// multiple dialects.
|
||||
pub struct TestedDialects {
|
||||
pub dialects: Vec<Box<dyn Dialect>>,
|
||||
pub options: Option<ParserOptions>,
|
||||
pub recursion_limit: Option<usize>,
|
||||
}
|
||||
|
||||
impl TestedDialects {
|
||||
/// Create a TestedDialects with default options and the given dialects.
|
||||
pub fn new(dialects: Vec<Box<dyn Dialect>>) -> Self {
|
||||
Self {
|
||||
dialects,
|
||||
options: None,
|
||||
recursion_limit: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_with_options(dialects: Vec<Box<dyn Dialect>>, options: ParserOptions) -> Self {
|
||||
Self {
|
||||
dialects,
|
||||
options: Some(options),
|
||||
recursion_limit: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
|
||||
self.recursion_limit = Some(recursion_limit);
|
||||
self
|
||||
}
|
||||
|
||||
fn new_parser<'a>(&self, dialect: &'a dyn Dialect) -> Parser<'a> {
|
||||
let parser = Parser::new(dialect);
|
||||
let parser = if let Some(options) = &self.options {
|
||||
parser.with_options(options.clone())
|
||||
} else {
|
||||
parser
|
||||
};
|
||||
|
||||
let parser = if let Some(recursion_limit) = &self.recursion_limit {
|
||||
parser.with_recursion_limit(*recursion_limit)
|
||||
} else {
|
||||
parser
|
||||
};
|
||||
|
||||
parser
|
||||
}
|
||||
|
||||
/// Run the given function for all of `self.dialects`, assert that they
|
||||
/// return the same result, and return that result.
|
||||
pub fn one_of_identical_results<F, T: Debug + PartialEq>(&self, f: F) -> T
|
||||
|
@ -99,12 +50,13 @@ impl TestedDialects {
|
|||
if let Some((prev_dialect, prev_parsed)) = s {
|
||||
assert_eq!(
|
||||
prev_parsed, parsed,
|
||||
"Parse results with {prev_dialect:?} are different from {dialect:?}"
|
||||
"Parse results with {:?} are different from {:?}",
|
||||
prev_dialect, dialect
|
||||
);
|
||||
}
|
||||
Some((dialect, parsed))
|
||||
})
|
||||
.expect("tested dialects cannot be empty")
|
||||
.unwrap()
|
||||
.1
|
||||
}
|
||||
|
||||
|
@ -113,99 +65,46 @@ impl TestedDialects {
|
|||
F: Fn(&mut Parser) -> T,
|
||||
{
|
||||
self.one_of_identical_results(|dialect| {
|
||||
let mut parser = self.new_parser(dialect).try_with_sql(sql).unwrap();
|
||||
f(&mut parser)
|
||||
let mut tokenizer = Tokenizer::new(dialect, sql);
|
||||
let tokens = tokenizer.tokenize().unwrap();
|
||||
f(&mut Parser::new(tokens, dialect))
|
||||
})
|
||||
}
|
||||
|
||||
/// Parses a single SQL string into multiple statements, ensuring
|
||||
/// the result is the same for all tested dialects.
|
||||
pub fn parse_sql_statements(&self, sql: &str) -> Result<Vec<Statement>, ParserError> {
|
||||
self.one_of_identical_results(|dialect| {
|
||||
let mut tokenizer = Tokenizer::new(dialect, sql);
|
||||
if let Some(options) = &self.options {
|
||||
tokenizer = tokenizer.with_unescape(options.unescape);
|
||||
}
|
||||
let tokens = tokenizer.tokenize()?;
|
||||
self.new_parser(dialect)
|
||||
.with_tokens(tokens)
|
||||
.parse_statements()
|
||||
})
|
||||
self.one_of_identical_results(|dialect| Parser::parse_sql(dialect, sql))
|
||||
// To fail the `ensure_multiple_dialects_are_tested` test:
|
||||
// Parser::parse_sql(&**self.dialects.first().unwrap(), sql)
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [Statement] for all tested
|
||||
/// dialects.
|
||||
///
|
||||
/// In general, the canonical SQL should be the same (see crate
|
||||
/// documentation for rationale) and you should prefer the `verified_`
|
||||
/// variants in testing, such as [`verified_statement`] or
|
||||
/// [`verified_query`].
|
||||
///
|
||||
/// If `canonical` is non empty,this function additionally asserts
|
||||
/// that:
|
||||
///
|
||||
/// 1. parsing `sql` results in the same [`Statement`] as parsing
|
||||
/// `canonical`.
|
||||
///
|
||||
/// 2. re-serializing the result of parsing `sql` produces the same
|
||||
/// `canonical` sql string
|
||||
///
|
||||
/// For multiple statements, use [`statements_parse_to`].
|
||||
/// Ensures that `sql` parses as a single statement and returns it.
|
||||
/// If non-empty `canonical` SQL representation is provided,
|
||||
/// additionally asserts that parsing `sql` results in the same parse
|
||||
/// tree as parsing `canonical`, and that serializing it back to string
|
||||
/// results in the `canonical` representation.
|
||||
pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement {
|
||||
let mut statements = self.parse_sql_statements(sql).expect(sql);
|
||||
let mut statements = self.parse_sql_statements(sql).unwrap();
|
||||
assert_eq!(statements.len(), 1);
|
||||
|
||||
if !canonical.is_empty() && sql != canonical {
|
||||
assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements);
|
||||
}
|
||||
|
||||
let only_statement = statements.pop().unwrap();
|
||||
|
||||
if !canonical.is_empty() {
|
||||
assert_eq!(canonical, only_statement.to_string())
|
||||
}
|
||||
only_statement
|
||||
}
|
||||
|
||||
/// The same as [`one_statement_parses_to`] but it works for a multiple statements
|
||||
pub fn statements_parse_to(&self, sql: &str, canonical: &str) -> Vec<Statement> {
|
||||
let statements = self.parse_sql_statements(sql).expect(sql);
|
||||
if !canonical.is_empty() && sql != canonical {
|
||||
assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements);
|
||||
} else {
|
||||
assert_eq!(
|
||||
sql,
|
||||
statements
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("; ")
|
||||
);
|
||||
}
|
||||
statements
|
||||
/// Ensures that `sql` parses as a single [Statement], and is not modified
|
||||
/// after a serialization round-trip.
|
||||
pub fn verified_stmt(&self, query: &str) -> Statement {
|
||||
self.one_statement_parses_to(query, query)
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as an [`Expr`], and that
|
||||
/// re-serializing the parse result produces canonical
|
||||
pub fn expr_parses_to(&self, sql: &str, canonical: &str) -> Expr {
|
||||
let ast = self
|
||||
.run_parser_method(sql, |parser| parser.parse_expr())
|
||||
.unwrap();
|
||||
assert_eq!(canonical, &ast.to_string());
|
||||
ast
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [Statement], and that
|
||||
/// re-serializing the parse result produces the same `sql`
|
||||
/// string (is not modified after a serialization round-trip).
|
||||
pub fn verified_stmt(&self, sql: &str) -> Statement {
|
||||
self.one_statement_parses_to(sql, sql)
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [Query], and that
|
||||
/// re-serializing the parse result produces the same `sql`
|
||||
/// string (is not modified after a serialization round-trip).
|
||||
/// Ensures that `sql` parses as a single [Query], and is not modified
|
||||
/// after a serialization round-trip.
|
||||
pub fn verified_query(&self, sql: &str) -> Query {
|
||||
match self.verified_stmt(sql) {
|
||||
Statement::Query(query) => *query,
|
||||
|
@ -213,19 +112,8 @@ impl TestedDialects {
|
|||
}
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [Query], and that
|
||||
/// re-serializing the parse result matches the given canonical
|
||||
/// sql string.
|
||||
pub fn verified_query_with_canonical(&self, query: &str, canonical: &str) -> Query {
|
||||
match self.one_statement_parses_to(query, canonical) {
|
||||
Statement::Query(query) => *query,
|
||||
_ => panic!("Expected Query"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [Select], and that
|
||||
/// re-serializing the parse result produces the same `sql`
|
||||
/// string (is not modified after a serialization round-trip).
|
||||
/// Ensures that `sql` parses as a single [Select], and is not modified
|
||||
/// after a serialization round-trip.
|
||||
pub fn verified_only_select(&self, query: &str) -> Select {
|
||||
match *self.verified_query(query).body {
|
||||
SetExpr::Select(s) => *s,
|
||||
|
@ -233,84 +121,29 @@ impl TestedDialects {
|
|||
}
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as a single [`Select`], and that additionally:
|
||||
///
|
||||
/// 1. parsing `sql` results in the same [`Statement`] as parsing
|
||||
/// `canonical`.
|
||||
///
|
||||
/// 2. re-serializing the result of parsing `sql` produces the same
|
||||
/// `canonical` sql string
|
||||
pub fn verified_only_select_with_canonical(&self, query: &str, canonical: &str) -> Select {
|
||||
let q = match self.one_statement_parses_to(query, canonical) {
|
||||
Statement::Query(query) => *query,
|
||||
_ => panic!("Expected Query"),
|
||||
};
|
||||
match *q.body {
|
||||
SetExpr::Select(s) => *s,
|
||||
_ => panic!("Expected SetExpr::Select"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Ensures that `sql` parses as an [`Expr`], and that
|
||||
/// re-serializing the parse result produces the same `sql`
|
||||
/// string (is not modified after a serialization round-trip).
|
||||
/// Ensures that `sql` parses as an expression, and is not modified
|
||||
/// after a serialization round-trip.
|
||||
pub fn verified_expr(&self, sql: &str) -> Expr {
|
||||
self.expr_parses_to(sql, sql)
|
||||
}
|
||||
|
||||
/// Check that the tokenizer returns the expected tokens for the given SQL.
|
||||
pub fn tokenizes_to(&self, sql: &str, expected: Vec<Token>) {
|
||||
if self.dialects.is_empty() {
|
||||
panic!("No dialects to test");
|
||||
}
|
||||
|
||||
self.dialects.iter().for_each(|dialect| {
|
||||
let mut tokenizer = Tokenizer::new(&**dialect, sql);
|
||||
if let Some(options) = &self.options {
|
||||
tokenizer = tokenizer.with_unescape(options.unescape);
|
||||
}
|
||||
let tokens = tokenizer.tokenize().unwrap();
|
||||
assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
|
||||
});
|
||||
let ast = self
|
||||
.run_parser_method(sql, |parser| parser.parse_expr())
|
||||
.unwrap();
|
||||
assert_eq!(sql, &ast.to_string(), "round-tripping without changes");
|
||||
ast
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns all available dialects.
|
||||
pub fn all_dialects() -> TestedDialects {
|
||||
TestedDialects::new(vec![
|
||||
Box::new(GenericDialect {}),
|
||||
Box::new(PostgreSqlDialect {}),
|
||||
Box::new(MsSqlDialect {}),
|
||||
Box::new(AnsiDialect {}),
|
||||
Box::new(SnowflakeDialect {}),
|
||||
Box::new(HiveDialect {}),
|
||||
Box::new(RedshiftSqlDialect {}),
|
||||
Box::new(MySqlDialect {}),
|
||||
Box::new(BigQueryDialect {}),
|
||||
Box::new(SQLiteDialect {}),
|
||||
Box::new(DuckDbDialect {}),
|
||||
Box::new(DatabricksDialect {}),
|
||||
Box::new(ClickHouseDialect {}),
|
||||
])
|
||||
}
|
||||
|
||||
/// Returns all dialects matching the given predicate.
|
||||
pub fn all_dialects_where<F>(predicate: F) -> TestedDialects
|
||||
where
|
||||
F: Fn(&dyn Dialect) -> bool,
|
||||
{
|
||||
let mut dialects = all_dialects();
|
||||
dialects.dialects.retain(|d| predicate(&**d));
|
||||
dialects
|
||||
}
|
||||
|
||||
/// Returns available dialects. The `except` predicate is used
|
||||
/// to filter out specific dialects.
|
||||
pub fn all_dialects_except<F>(except: F) -> TestedDialects
|
||||
where
|
||||
F: Fn(&dyn Dialect) -> bool,
|
||||
{
|
||||
all_dialects_where(|d| !except(d))
|
||||
TestedDialects {
|
||||
dialects: vec![
|
||||
Box::new(GenericDialect {}),
|
||||
Box::new(PostgreSqlDialect {}),
|
||||
Box::new(MsSqlDialect {}),
|
||||
Box::new(AnsiDialect {}),
|
||||
Box::new(SnowflakeDialect {}),
|
||||
Box::new(HiveDialect {}),
|
||||
Box::new(RedshiftSqlDialect {}),
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assert_eq_vec<T: ToString>(expected: &[&str], actual: &[T]) {
|
||||
|
@ -336,41 +169,10 @@ pub fn expr_from_projection(item: &SelectItem) -> &Expr {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn alter_table_op_with_name(stmt: Statement, expected_name: &str) -> AlterTableOperation {
|
||||
match stmt {
|
||||
Statement::AlterTable {
|
||||
name,
|
||||
if_exists,
|
||||
only: is_only,
|
||||
operations,
|
||||
on_cluster: _,
|
||||
location: _,
|
||||
iceberg,
|
||||
} => {
|
||||
assert_eq!(name.to_string(), expected_name);
|
||||
assert!(!if_exists);
|
||||
assert!(!is_only);
|
||||
assert!(!iceberg);
|
||||
only(operations)
|
||||
}
|
||||
_ => panic!("Expected ALTER TABLE statement"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn alter_table_op(stmt: Statement) -> AlterTableOperation {
|
||||
alter_table_op_with_name(stmt, "tab")
|
||||
}
|
||||
|
||||
/// Creates a `Value::Number`, panic'ing if n is not a number
|
||||
pub fn number(n: &str) -> Value {
|
||||
pub fn number(n: &'static str) -> Value {
|
||||
Value::Number(n.parse().unwrap(), false)
|
||||
}
|
||||
|
||||
/// Creates a [Value::SingleQuotedString]
|
||||
pub fn single_quoted_string(s: impl Into<String>) -> Value {
|
||||
Value::SingleQuotedString(s.into())
|
||||
}
|
||||
|
||||
pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
|
||||
Some(TableAlias {
|
||||
name: Ident::new(name),
|
||||
|
@ -380,125 +182,16 @@ pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
|
|||
|
||||
pub fn table(name: impl Into<String>) -> TableFactor {
|
||||
TableFactor::Table {
|
||||
name: ObjectName::from(vec![Ident::new(name.into())]),
|
||||
name: ObjectName(vec![Ident::new(name.into())]),
|
||||
alias: None,
|
||||
args: None,
|
||||
with_hints: vec![],
|
||||
version: None,
|
||||
partitions: vec![],
|
||||
with_ordinality: false,
|
||||
json_path: None,
|
||||
sample: None,
|
||||
index_hints: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn table_from_name(name: ObjectName) -> TableFactor {
|
||||
TableFactor::Table {
|
||||
name,
|
||||
alias: None,
|
||||
args: None,
|
||||
with_hints: vec![],
|
||||
version: None,
|
||||
partitions: vec![],
|
||||
with_ordinality: false,
|
||||
json_path: None,
|
||||
sample: None,
|
||||
index_hints: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn table_with_alias(name: impl Into<String>, alias: impl Into<String>) -> TableFactor {
|
||||
TableFactor::Table {
|
||||
name: ObjectName::from(vec![Ident::new(name)]),
|
||||
alias: Some(TableAlias {
|
||||
name: Ident::new(alias),
|
||||
columns: vec![],
|
||||
}),
|
||||
args: None,
|
||||
with_hints: vec![],
|
||||
version: None,
|
||||
partitions: vec![],
|
||||
with_ordinality: false,
|
||||
json_path: None,
|
||||
sample: None,
|
||||
index_hints: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join(relation: TableFactor) -> Join {
|
||||
Join {
|
||||
relation,
|
||||
global: false,
|
||||
join_operator: JoinOperator::Join(JoinConstraint::Natural),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
|
||||
Expr::Function(Function {
|
||||
name: ObjectName::from(vec![Ident::new(function)]),
|
||||
uses_odbc_syntax: false,
|
||||
parameters: FunctionArguments::None,
|
||||
args: FunctionArguments::List(FunctionArgumentList {
|
||||
duplicate_treatment: None,
|
||||
args: args
|
||||
.into_iter()
|
||||
.map(|arg| FunctionArg::Unnamed(FunctionArgExpr::Expr(arg)))
|
||||
.collect(),
|
||||
clauses: vec![],
|
||||
}),
|
||||
filter: None,
|
||||
null_treatment: None,
|
||||
over: None,
|
||||
within_group: vec![],
|
||||
})
|
||||
}
|
||||
|
||||
/// Gets the first index column (mysql calls it a key part) of the first index found in a
|
||||
/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
|
||||
pub fn index_column(stmt: Statement) -> Expr {
|
||||
match stmt {
|
||||
Statement::CreateIndex(CreateIndex { columns, .. }) => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
Statement::CreateTable(CreateTable { constraints, .. }) => {
|
||||
match constraints.first().unwrap() {
|
||||
TableConstraint::Index { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::Unique { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::PrimaryKey { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::FulltextOrSpatial { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
|
||||
}
|
||||
}
|
||||
Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
|
||||
AlterTableOperation::AddConstraint { constraint, .. } => {
|
||||
match constraint {
|
||||
TableConstraint::Index { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::Unique { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::PrimaryKey { columns, .. } => {
|
||||
columns.first().unwrap().column.expr.clone()
|
||||
}
|
||||
TableConstraint::FulltextOrSpatial {
|
||||
columns,
|
||||
..
|
||||
} => columns.first().unwrap().column.expr.clone(),
|
||||
_ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
|
||||
}
|
||||
}
|
||||
_ => panic!("Expected a constraint"),
|
||||
},
|
||||
_ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
|
||||
join_operator: JoinOperator::Inner(JoinConstraint::Natural),
|
||||
}
|
||||
}
|
||||
|
|
3247
src/tokenizer.rs
3247
src/tokenizer.rs
File diff suppressed because it is too large
Load diff
|
@ -1,414 +0,0 @@
|
|||
// Licensed to the Apache Software Foundation (ASF) under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing,
|
||||
// software distributed under the License is distributed on an
|
||||
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied. See the License for the
|
||||
// specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
|
||||
fn prettify(sql: &str) -> String {
|
||||
let ast = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
|
||||
format!("{:#}", ast[0])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_select() {
|
||||
assert_eq!(
|
||||
prettify("SELECT a, b, c FROM my_table WHERE x = 1 AND y = 2"),
|
||||
r#"
|
||||
SELECT
|
||||
a,
|
||||
b,
|
||||
c
|
||||
FROM
|
||||
my_table
|
||||
WHERE
|
||||
x = 1 AND y = 2
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_join() {
|
||||
assert_eq!(
|
||||
prettify("SELECT a FROM table1 JOIN table2 ON table1.id = table2.id"),
|
||||
r#"
|
||||
SELECT
|
||||
a
|
||||
FROM
|
||||
table1
|
||||
JOIN table2 ON table1.id = table2.id
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_subquery() {
|
||||
assert_eq!(
|
||||
prettify("SELECT * FROM (SELECT a, b FROM my_table) AS subquery"),
|
||||
r#"
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
a,
|
||||
b
|
||||
FROM
|
||||
my_table
|
||||
) AS subquery
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_union() {
|
||||
assert_eq!(
|
||||
prettify("SELECT a FROM table1 UNION SELECT b FROM table2"),
|
||||
r#"
|
||||
SELECT
|
||||
a
|
||||
FROM
|
||||
table1
|
||||
UNION
|
||||
SELECT
|
||||
b
|
||||
FROM
|
||||
table2
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_group_by() {
|
||||
assert_eq!(
|
||||
prettify("SELECT a, COUNT(*) FROM my_table GROUP BY a HAVING COUNT(*) > 1"),
|
||||
r#"
|
||||
SELECT
|
||||
a,
|
||||
COUNT(*)
|
||||
FROM
|
||||
my_table
|
||||
GROUP BY
|
||||
a
|
||||
HAVING
|
||||
COUNT(*) > 1
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_cte() {
|
||||
assert_eq!(
|
||||
prettify("WITH cte AS (SELECT a, b FROM my_table) SELECT * FROM cte"),
|
||||
r#"
|
||||
WITH cte AS (
|
||||
SELECT
|
||||
a,
|
||||
b
|
||||
FROM
|
||||
my_table
|
||||
)
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
cte
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_case_when() {
|
||||
assert_eq!(
|
||||
prettify("SELECT CASE WHEN x > 0 THEN 'positive' WHEN x < 0 THEN 'negative' ELSE 'zero' END FROM my_table"),
|
||||
r#"
|
||||
SELECT
|
||||
CASE
|
||||
WHEN x > 0 THEN
|
||||
'positive'
|
||||
WHEN x < 0 THEN
|
||||
'negative'
|
||||
ELSE
|
||||
'zero'
|
||||
END
|
||||
FROM
|
||||
my_table
|
||||
"#.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_window_function() {
|
||||
assert_eq!(
|
||||
prettify("SELECT id, value, ROW_NUMBER() OVER (PARTITION BY category ORDER BY value DESC) as rank FROM my_table"),
|
||||
r#"
|
||||
SELECT
|
||||
id,
|
||||
value,
|
||||
ROW_NUMBER() OVER (
|
||||
PARTITION BY category
|
||||
ORDER BY value DESC
|
||||
) AS rank
|
||||
FROM
|
||||
my_table
|
||||
"#.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_multiline_string() {
|
||||
assert_eq!(
|
||||
prettify("SELECT 'multiline\nstring' AS str"),
|
||||
r#"
|
||||
SELECT
|
||||
'multiline
|
||||
string' AS str
|
||||
"#
|
||||
.trim(),
|
||||
"A literal string with a newline should be kept as is. The contents of the string should not be indented."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_insert_values() {
|
||||
assert_eq!(
|
||||
prettify("INSERT INTO my_table (a, b, c) VALUES (1, 2, 3), (4, 5, 6)"),
|
||||
r#"
|
||||
INSERT INTO my_table (a, b, c)
|
||||
VALUES
|
||||
(1, 2, 3),
|
||||
(4, 5, 6)
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_insert_select() {
|
||||
assert_eq!(
|
||||
prettify("INSERT INTO my_table (a, b) SELECT x, y FROM source_table RETURNING a AS id"),
|
||||
r#"
|
||||
INSERT INTO my_table (a, b)
|
||||
SELECT
|
||||
x,
|
||||
y
|
||||
FROM
|
||||
source_table
|
||||
RETURNING
|
||||
a AS id
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_update() {
|
||||
assert_eq!(
|
||||
prettify("UPDATE my_table SET a = 1, b = 2 WHERE x > 0 RETURNING id, name"),
|
||||
r#"
|
||||
UPDATE my_table
|
||||
SET
|
||||
a = 1,
|
||||
b = 2
|
||||
WHERE
|
||||
x > 0
|
||||
RETURNING
|
||||
id,
|
||||
name
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_delete() {
|
||||
assert_eq!(
|
||||
prettify("DELETE FROM my_table WHERE x > 0 RETURNING id, name"),
|
||||
r#"
|
||||
DELETE FROM
|
||||
my_table
|
||||
WHERE
|
||||
x > 0
|
||||
RETURNING
|
||||
id,
|
||||
name
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
prettify("DELETE table1, table2"),
|
||||
r#"
|
||||
DELETE
|
||||
table1,
|
||||
table2
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_create_table() {
|
||||
assert_eq!(
|
||||
prettify("CREATE TABLE my_table (id INT PRIMARY KEY, name VARCHAR(255) NOT NULL, CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id))"),
|
||||
r#"
|
||||
CREATE TABLE my_table (
|
||||
id INT PRIMARY KEY,
|
||||
name VARCHAR(255) NOT NULL,
|
||||
CONSTRAINT fk_other FOREIGN KEY (id) REFERENCES other_table(id)
|
||||
)
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pretty_print_create_view() {
|
||||
assert_eq!(
|
||||
prettify("CREATE VIEW my_view AS SELECT a, b FROM my_table WHERE x > 0"),
|
||||
r#"
|
||||
CREATE VIEW my_view AS
|
||||
SELECT
|
||||
a,
|
||||
b
|
||||
FROM
|
||||
my_table
|
||||
WHERE
|
||||
x > 0
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_create_function() {
|
||||
assert_eq!(
|
||||
prettify("CREATE FUNCTION my_func() RETURNS INT BEGIN SELECT COUNT(*) INTO @count FROM my_table; RETURN @count; END"),
|
||||
r#"
|
||||
CREATE FUNCTION my_func() RETURNS INT
|
||||
BEGIN
|
||||
SELECT COUNT(*) INTO @count FROM my_table;
|
||||
RETURN @count;
|
||||
END
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_json_table() {
|
||||
assert_eq!(
|
||||
prettify("SELECT * FROM JSON_TABLE(@json, '$[*]' COLUMNS (id INT PATH '$.id', name VARCHAR(255) PATH '$.name')) AS jt"),
|
||||
r#"
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
JSON_TABLE(
|
||||
@json,
|
||||
'$[*]' COLUMNS (
|
||||
id INT PATH '$.id',
|
||||
name VARCHAR(255) PATH '$.name'
|
||||
)
|
||||
) AS jt
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_transaction_blocks() {
|
||||
assert_eq!(
|
||||
prettify("BEGIN; UPDATE my_table SET x = 1; COMMIT;"),
|
||||
r#"
|
||||
BEGIN;
|
||||
UPDATE my_table SET x = 1;
|
||||
COMMIT;
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_control_flow() {
|
||||
assert_eq!(
|
||||
prettify("IF x > 0 THEN SELECT 'positive'; ELSE SELECT 'negative'; END IF;"),
|
||||
r#"
|
||||
IF x > 0 THEN
|
||||
SELECT 'positive';
|
||||
ELSE
|
||||
SELECT 'negative';
|
||||
END IF;
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_merge() {
|
||||
assert_eq!(
|
||||
prettify("MERGE INTO target_table t USING source_table s ON t.id = s.id WHEN MATCHED THEN UPDATE SET t.value = s.value WHEN NOT MATCHED THEN INSERT (id, value) VALUES (s.id, s.value)"),
|
||||
r#"
|
||||
MERGE INTO target_table t
|
||||
USING source_table s ON t.id = s.id
|
||||
WHEN MATCHED THEN
|
||||
UPDATE SET t.value = s.value
|
||||
WHEN NOT MATCHED THEN
|
||||
INSERT (id, value) VALUES (s.id, s.value)
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_create_index() {
|
||||
assert_eq!(
|
||||
prettify("CREATE INDEX idx_name ON my_table (column1, column2)"),
|
||||
r#"
|
||||
CREATE INDEX idx_name
|
||||
ON my_table (column1, column2)
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore = "https://github.com/apache/datafusion-sqlparser-rs/issues/1850"]
|
||||
fn test_pretty_print_explain() {
|
||||
assert_eq!(
|
||||
prettify("EXPLAIN ANALYZE SELECT * FROM my_table WHERE x > 0"),
|
||||
r#"
|
||||
EXPLAIN ANALYZE
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
my_table
|
||||
WHERE
|
||||
x > 0
|
||||
"#
|
||||
.trim()
|
||||
);
|
||||
}
|
|
@ -1,21 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
|
||||
select
|
||||
l_returnflag,
|
||||
l_linestatus,
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
create view revenue0 (supplier_no, total_revenue) as
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,22 +1,6 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
select
|
||||
s_acctbal,
|
||||
s_name,
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
|
@ -1,20 +1,3 @@
|
|||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
-- using default substitutions
|
||||
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue