Mirror of https://github.com/apache/datafusion-sqlparser-rs.git, synced 2025-07-08 17:35:00 +00:00
Compare commits
33 commits
| SHA1 |
| --- |
| 93450cc250 |
| 1a33abda63 |
| f2fba48a7a |
| cf9e50474e |
| ed8757f2f0 |
| d2466af20a |
| b0bcc46e22 |
| 942d747d89 |
| 239e30a97c |
| 9020385c02 |
| be2d2f14e7 |
| 418b94227a |
| 015caca611 |
| a3398223d7 |
| f32a41a004 |
| 9ffc546870 |
| abd80f9ecb |
| 3bc94234df |
| 50c605a471 |
| 6c38cdcadb |
| 5f2b5fe7be |
| 95d16e3b2d |
| 1bbc05cdff |
| b2ab0061c1 |
| b9365b3853 |
| 44f3be38e5 |
| 5d63663bc6 |
| 7865de015f |
| 1d0dc7cdd8 |
| 204d3b484d |
| 185a490218 |
| b1b379e570 |
| be30697efb |
36 changed files with 3104 additions and 673 deletions
.github/workflows/license.yml (vendored, normal file, 39 changes)
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: license
+
+# trigger for all PRs and changes to main
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+jobs:
+
+  rat:
+    name: Release Audit Tool (RAT)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.8
+      - name: Audit licenses
+        run: ./dev/release/run-rat.sh .
@@ -1,7 +1,8 @@
-# Files to exclude from the Apache Rat (license) check
 .gitignore
 .tool-versions
+target/*
+**.gitignore
+rat.txt
 dev/release/rat_exclude_files.txt
-fuzz/.gitignore
 sqlparser_bench/img/flamegraph.svg
-**Cargo.lock
+filtered_rat.txt
@@ -63,7 +63,7 @@ $ cargo run --example cli - [--dialectname]
     };

     let contents = if filename == "-" {
-        println!("Parsing from stdin using {:?}", dialect);
+        println!("Parsing from stdin using {dialect:?}");
         let mut buf = Vec::new();
         stdin()
             .read_to_end(&mut buf)
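The same modernization recurs throughout this comparison: positional `format!`/`write!` arguments are replaced with inline captured identifiers. A minimal, self-contained illustration (the variable name here is invented for the example):

```rust
fn main() {
    let dialect = "GenericDialect";
    // Older positional form, as removed across these commits.
    println!("Parsing from stdin using {:?}", dialect);
    // Inline (captured) format argument, the form adopted across these commits.
    println!("Parsing from stdin using {dialect:?}");
}
```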
@@ -45,25 +45,24 @@ fn basic_queries(c: &mut Criterion) {

     let large_statement = {
         let expressions = (0..1000)
-            .map(|n| format!("FN_{}(COL_{})", n, n))
+            .map(|n| format!("FN_{n}(COL_{n})"))
             .collect::<Vec<_>>()
             .join(", ");
         let tables = (0..1000)
-            .map(|n| format!("TABLE_{}", n))
+            .map(|n| format!("TABLE_{n}"))
             .collect::<Vec<_>>()
             .join(" JOIN ");
         let where_condition = (0..1000)
-            .map(|n| format!("COL_{} = {}", n, n))
+            .map(|n| format!("COL_{n} = {n}"))
            .collect::<Vec<_>>()
             .join(" OR ");
         let order_condition = (0..1000)
-            .map(|n| format!("COL_{} DESC", n))
+            .map(|n| format!("COL_{n} DESC"))
             .collect::<Vec<_>>()
             .join(", ");

         format!(
-            "SELECT {} FROM {} WHERE {} ORDER BY {}",
-            expressions, tables, where_condition, order_condition
+            "SELECT {expressions} FROM {tables} WHERE {where_condition} ORDER BY {order_condition}"
         )
     };

@@ -446,6 +446,14 @@ pub enum DataType {
     ///
     /// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html
     GeometricType(GeometricTypeKind),
+    /// PostgreSQL text search vectors, see [PostgreSQL].
+    ///
+    /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
+    TsVector,
+    /// PostgreSQL text search query, see [PostgreSQL].
+    ///
+    /// [PostgreSQL]: https://www.postgresql.org/docs/17/datatype-textsearch.html
+    TsQuery,
 }

 impl fmt::Display for DataType {
@@ -658,7 +666,7 @@ impl fmt::Display for DataType {
             }
             DataType::Enum(vals, bits) => {
                 match bits {
-                    Some(bits) => write!(f, "ENUM{}", bits),
+                    Some(bits) => write!(f, "ENUM{bits}"),
                     None => write!(f, "ENUM"),
                 }?;
                 write!(f, "(")?;
@@ -706,16 +714,16 @@ impl fmt::Display for DataType {
             }
             // ClickHouse
             DataType::Nullable(data_type) => {
-                write!(f, "Nullable({})", data_type)
+                write!(f, "Nullable({data_type})")
             }
             DataType::FixedString(character_length) => {
-                write!(f, "FixedString({})", character_length)
+                write!(f, "FixedString({character_length})")
             }
             DataType::LowCardinality(data_type) => {
-                write!(f, "LowCardinality({})", data_type)
+                write!(f, "LowCardinality({data_type})")
             }
             DataType::Map(key_data_type, value_data_type) => {
-                write!(f, "Map({}, {})", key_data_type, value_data_type)
+                write!(f, "Map({key_data_type}, {value_data_type})")
             }
             DataType::Tuple(fields) => {
                 write!(f, "Tuple({})", display_comma_separated(fields))
@@ -737,7 +745,9 @@ impl fmt::Display for DataType {
             DataType::NamedTable { name, columns } => {
                 write!(f, "{} TABLE ({})", name, display_comma_separated(columns))
             }
-            DataType::GeometricType(kind) => write!(f, "{}", kind),
+            DataType::GeometricType(kind) => write!(f, "{kind}"),
+            DataType::TsVector => write!(f, "TSVECTOR"),
+            DataType::TsQuery => write!(f, "TSQUERY"),
         }
     }
 }
@@ -932,7 +942,7 @@ impl fmt::Display for CharacterLength {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             CharacterLength::IntegerLength { length, unit } => {
-                write!(f, "{}", length)?;
+                write!(f, "{length}")?;
                 if let Some(unit) = unit {
                     write!(f, " {unit}")?;
                 }
@@ -987,7 +997,7 @@ impl fmt::Display for BinaryLength {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             BinaryLength::IntegerLength { length } => {
-                write!(f, "{}", length)?;
+                write!(f, "{length}")?;
             }
             BinaryLength::Max => {
                 write!(f, "MAX")?;
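Based on the `Display` arms added above, the new PostgreSQL text-search variants round-trip to their SQL spellings. A small sketch, assuming the variants are exposed through `sqlparser::ast` like the existing `DataType` ones:

```rust
use sqlparser::ast::DataType;

fn main() {
    // The Display arms added in this hunk serialize the new variants back to SQL.
    assert_eq!(DataType::TsVector.to_string(), "TSVECTOR");
    assert_eq!(DataType::TsQuery.to_string(), "TSQUERY");
}
```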
@@ -173,7 +173,7 @@ impl fmt::Display for AlterRoleOperation {
                 in_database,
             } => {
                 if let Some(database_name) = in_database {
-                    write!(f, "IN DATABASE {} ", database_name)?;
+                    write!(f, "IN DATABASE {database_name} ")?;
                 }

                 match config_value {
@@ -187,7 +187,7 @@ impl fmt::Display for AlterRoleOperation {
                 in_database,
             } => {
                 if let Some(database_name) = in_database {
-                    write!(f, "IN DATABASE {} ", database_name)?;
+                    write!(f, "IN DATABASE {database_name} ")?;
                 }

                 match config_name {
@@ -218,15 +218,15 @@ impl fmt::Display for Use {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str("USE ")?;
         match self {
-            Use::Catalog(name) => write!(f, "CATALOG {}", name),
-            Use::Schema(name) => write!(f, "SCHEMA {}", name),
-            Use::Database(name) => write!(f, "DATABASE {}", name),
-            Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name),
-            Use::Role(name) => write!(f, "ROLE {}", name),
+            Use::Catalog(name) => write!(f, "CATALOG {name}"),
+            Use::Schema(name) => write!(f, "SCHEMA {name}"),
+            Use::Database(name) => write!(f, "DATABASE {name}"),
+            Use::Warehouse(name) => write!(f, "WAREHOUSE {name}"),
+            Use::Role(name) => write!(f, "ROLE {name}"),
             Use::SecondaryRoles(secondary_roles) => {
-                write!(f, "SECONDARY ROLES {}", secondary_roles)
+                write!(f, "SECONDARY ROLES {secondary_roles}")
             }
-            Use::Object(name) => write!(f, "{}", name),
+            Use::Object(name) => write!(f, "{name}"),
             Use::Default => write!(f, "DEFAULT"),
         }
     }
src/ast/ddl.rs (134 changes)
@@ -30,11 +30,11 @@ use sqlparser_derive::{Visit, VisitMut};

 use crate::ast::value::escape_single_quote_string;
 use crate::ast::{
-    display_comma_separated, display_separated, CommentDef, CreateFunctionBody,
+    display_comma_separated, display_separated, ArgMode, CommentDef, CreateFunctionBody,
     CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull,
-    FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName,
-    OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value,
-    ValueWithSpan,
+    FunctionDeterminismSpecifier, FunctionParallel, Ident, IndexColumn, MySQLColumnPosition,
+    ObjectName, OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag,
+    Value, ValueWithSpan,
 };
 use crate::keywords::Keyword;
 use crate::tokenizer::Token;
@@ -57,7 +57,7 @@ impl fmt::Display for ReplicaIdentity {
             ReplicaIdentity::None => f.write_str("NONE"),
             ReplicaIdentity::Full => f.write_str("FULL"),
             ReplicaIdentity::Default => f.write_str("DEFAULT"),
-            ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {}", idx),
+            ReplicaIdentity::Index(idx) => write!(f, "USING INDEX {idx}"),
         }
     }
 }
@@ -67,8 +67,11 @@ impl fmt::Display for ReplicaIdentity {
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub enum AlterTableOperation {
-    /// `ADD <table_constraint>`
-    AddConstraint(TableConstraint),
+    /// `ADD <table_constraint> [NOT VALID]`
+    AddConstraint {
+        constraint: TableConstraint,
+        not_valid: bool,
+    },
     /// `ADD [COLUMN] [IF NOT EXISTS] <column_def>`
     AddColumn {
         /// `[COLUMN]`.
@@ -137,10 +140,10 @@ pub enum AlterTableOperation {
         name: Ident,
         drop_behavior: Option<DropBehavior>,
     },
-    /// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ CASCADE ]`
+    /// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [ , <column_name>, ... ] [ CASCADE ]`
     DropColumn {
         has_column_keyword: bool,
-        column_name: Ident,
+        column_names: Vec<Ident>,
         if_exists: bool,
         drop_behavior: Option<DropBehavior>,
     },
@@ -344,6 +347,10 @@ pub enum AlterTableOperation {
         equals: bool,
         value: ValueWithSpan,
     },
+    /// `VALIDATE CONSTRAINT <name>`
+    ValidateConstraint {
+        name: Ident,
+    },
 }

 /// An `ALTER Policy` (`Statement::AlterPolicy`) operation
@@ -450,7 +457,7 @@ pub enum Owner {
 impl fmt::Display for Owner {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            Owner::Ident(ident) => write!(f, "{}", ident),
+            Owner::Ident(ident) => write!(f, "{ident}"),
             Owner::CurrentRole => write!(f, "CURRENT_ROLE"),
             Owner::CurrentUser => write!(f, "CURRENT_USER"),
             Owner::SessionUser => write!(f, "SESSION_USER"),
@@ -494,7 +501,16 @@ impl fmt::Display for AlterTableOperation {
                 display_separated(new_partitions, " "),
                 ine = if *if_not_exists { " IF NOT EXISTS" } else { "" }
             ),
-            AlterTableOperation::AddConstraint(c) => write!(f, "ADD {c}"),
+            AlterTableOperation::AddConstraint {
+                not_valid,
+                constraint,
+            } => {
+                write!(f, "ADD {constraint}")?;
+                if *not_valid {
+                    write!(f, " NOT VALID")?;
+                }
+                Ok(())
+            }
             AlterTableOperation::AddColumn {
                 column_keyword,
                 if_not_exists,
@@ -525,7 +541,7 @@ impl fmt::Display for AlterTableOperation {
                 if *if_not_exists {
                     write!(f, " IF NOT EXISTS")?;
                 }
-                write!(f, " {} ({})", name, query)
+                write!(f, " {name} ({query})")
             }
             AlterTableOperation::Algorithm { equals, algorithm } => {
                 write!(
@@ -540,7 +556,7 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)
+                write!(f, " {name}")
             }
             AlterTableOperation::MaterializeProjection {
                 if_exists,
@@ -551,9 +567,9 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)?;
+                write!(f, " {name}")?;
                 if let Some(partition) = partition {
-                    write!(f, " IN PARTITION {}", partition)?;
+                    write!(f, " IN PARTITION {partition}")?;
                 }
                 Ok(())
             }
@@ -566,9 +582,9 @@ impl fmt::Display for AlterTableOperation {
                 if *if_exists {
                     write!(f, " IF EXISTS")?;
                 }
-                write!(f, " {}", name)?;
+                write!(f, " {name}")?;
                 if let Some(partition) = partition {
-                    write!(f, " IN PARTITION {}", partition)?;
+                    write!(f, " IN PARTITION {partition}")?;
                 }
                 Ok(())
             }
@@ -615,7 +631,7 @@ impl fmt::Display for AlterTableOperation {
             AlterTableOperation::DropIndex { name } => write!(f, "DROP INDEX {name}"),
             AlterTableOperation::DropColumn {
                 has_column_keyword,
-                column_name,
+                column_names: column_name,
                 if_exists,
                 drop_behavior,
             } => write!(
@@ -623,7 +639,7 @@ impl fmt::Display for AlterTableOperation {
                 "DROP {}{}{}{}",
                 if *has_column_keyword { "COLUMN " } else { "" },
                 if *if_exists { "IF EXISTS " } else { "" },
-                column_name,
+                display_comma_separated(column_name),
                 match drop_behavior {
                     None => "",
                     Some(DropBehavior::Restrict) => " RESTRICT",
@@ -772,6 +788,9 @@ impl fmt::Display for AlterTableOperation {
             AlterTableOperation::ReplicaIdentity { identity } => {
                 write!(f, "REPLICA IDENTITY {identity}")
             }
+            AlterTableOperation::ValidateConstraint { name } => {
+                write!(f, "VALIDATE CONSTRAINT {name}")
+            }
         }
     }
 }
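A sketch of consuming the reworked `ADD CONSTRAINT` and new `VALIDATE CONSTRAINT` operations; the variant and field names come from the hunks above, while the helper function itself is hypothetical:

```rust
use sqlparser::ast::AlterTableOperation;

// Summarize an ALTER TABLE operation, mirroring the Display arms shown above.
fn summarize(op: &AlterTableOperation) -> String {
    match op {
        AlterTableOperation::AddConstraint { constraint, not_valid } => {
            let suffix = if *not_valid { " NOT VALID" } else { "" };
            format!("ADD {constraint}{suffix}")
        }
        AlterTableOperation::ValidateConstraint { name } => {
            format!("VALIDATE CONSTRAINT {name}")
        }
        other => other.to_string(),
    }
}
```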
@@ -893,7 +912,10 @@ pub enum AlterColumnOperation {
         data_type: DataType,
         /// PostgreSQL specific
         using: Option<Expr>,
+        /// Set to true if the statement includes the `SET DATA TYPE` keywords
+        had_set: bool,
     },

     /// `ADD GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ]`
     ///
     /// Note: this is a PostgreSQL-specific operation.
@@ -914,12 +936,19 @@ impl fmt::Display for AlterColumnOperation {
             AlterColumnOperation::DropDefault => {
                 write!(f, "DROP DEFAULT")
             }
-            AlterColumnOperation::SetDataType { data_type, using } => {
-                if let Some(expr) = using {
-                    write!(f, "SET DATA TYPE {data_type} USING {expr}")
-                } else {
-                    write!(f, "SET DATA TYPE {data_type}")
+            AlterColumnOperation::SetDataType {
+                data_type,
+                using,
+                had_set,
+            } => {
+                if *had_set {
+                    write!(f, "SET DATA ")?;
                 }
+                write!(f, "TYPE {data_type}")?;
+                if let Some(expr) = using {
+                    write!(f, " USING {expr}")?;
+                }
+                Ok(())
             }
             AlterColumnOperation::AddGenerated {
                 generated_as,
@@ -979,7 +1008,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Identifiers of the columns that are unique.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
         index_options: Vec<IndexOption>,
         characteristics: Option<ConstraintCharacteristics>,
         /// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]`
@@ -1015,7 +1044,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Identifiers of the columns that form the primary key.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
         index_options: Vec<IndexOption>,
         characteristics: Option<ConstraintCharacteristics>,
     },
@@ -1060,7 +1089,7 @@ pub enum TableConstraint {
         /// [1]: IndexType
         index_type: Option<IndexType>,
         /// Referred column identifier list.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
     },
     /// MySQLs [fulltext][1] definition. Since the [`SPATIAL`][2] definition is exactly the same,
     /// and MySQL displays both the same way, it is part of this definition as well.
@@ -1083,7 +1112,7 @@ pub enum TableConstraint {
         /// Optional index name.
         opt_index_name: Option<Ident>,
         /// Referred column identifier list.
-        columns: Vec<Ident>,
+        columns: Vec<IndexColumn>,
     },
 }

@@ -1168,7 +1197,7 @@ impl fmt::Display for TableConstraint {
                     write!(f, " ON UPDATE {action}")?;
                 }
                 if let Some(characteristics) = characteristics {
-                    write!(f, " {}", characteristics)?;
+                    write!(f, " {characteristics}")?;
                 }
                 Ok(())
             }
@@ -1308,7 +1337,7 @@ impl fmt::Display for IndexType {
             Self::SPGiST => write!(f, "SPGIST"),
             Self::BRIN => write!(f, "BRIN"),
             Self::Bloom => write!(f, "BLOOM"),
-            Self::Custom(name) => write!(f, "{}", name),
+            Self::Custom(name) => write!(f, "{name}"),
         }
     }
 }
@@ -1367,11 +1396,16 @@ impl fmt::Display for NullsDistinctOption {
 pub struct ProcedureParam {
     pub name: Ident,
     pub data_type: DataType,
+    pub mode: Option<ArgMode>,
 }

 impl fmt::Display for ProcedureParam {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{} {}", self.name, self.data_type)
+        if let Some(mode) = &self.mode {
+            write!(f, "{mode} {} {}", self.name, self.data_type)
+        } else {
+            write!(f, "{} {}", self.name, self.data_type)
+        }
     }
 }
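A hedged sketch of the new `mode` field on `ProcedureParam`, assuming `ArgMode` renders as `IN`/`OUT`/`INOUT` the way it does for function arguments elsewhere in the crate:

```rust
use sqlparser::ast::{ArgMode, DataType, Ident, ProcedureParam};

fn main() {
    // With the new `mode` field, a procedure parameter can carry IN/OUT/INOUT,
    // and Display prepends it when present (per the impl above).
    let param = ProcedureParam {
        name: Ident::new("x"),
        data_type: DataType::Int(None),
        mode: Some(ArgMode::In),
    };
    assert_eq!(param.to_string(), "IN x INT");
}
```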
@@ -1421,17 +1455,41 @@ impl fmt::Display for ColumnDef {
 pub struct ViewColumnDef {
     pub name: Ident,
     pub data_type: Option<DataType>,
-    pub options: Option<Vec<ColumnOption>>,
+    pub options: Option<ColumnOptions>,
+}
+
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum ColumnOptions {
+    CommaSeparated(Vec<ColumnOption>),
+    SpaceSeparated(Vec<ColumnOption>),
+}
+
+impl ColumnOptions {
+    pub fn as_slice(&self) -> &[ColumnOption] {
+        match self {
+            ColumnOptions::CommaSeparated(options) => options.as_slice(),
+            ColumnOptions::SpaceSeparated(options) => options.as_slice(),
+        }
+    }
 }

 impl fmt::Display for ViewColumnDef {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.name)?;
         if let Some(data_type) = self.data_type.as_ref() {
-            write!(f, " {}", data_type)?;
+            write!(f, " {data_type}")?;
         }
         if let Some(options) = self.options.as_ref() {
-            write!(f, " {}", display_comma_separated(options.as_slice()))?;
+            match options {
+                ColumnOptions::CommaSeparated(column_options) => {
+                    write!(f, " {}", display_comma_separated(column_options.as_slice()))?;
+                }
+                ColumnOptions::SpaceSeparated(column_options) => {
+                    write!(f, " {}", display_separated(column_options.as_slice(), " "))?
+                }
+            }
         }
         Ok(())
     }
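A short sketch of the new `ColumnOptions` wrapper and its `as_slice` accessor, using the types as declared in the hunk above (both are re-exported from `sqlparser::ast` per the mod.rs change further down):

```rust
use sqlparser::ast::{ColumnOption, ColumnOptions};

fn main() {
    // The wrapper records whether a view column's options were written
    // comma-separated or space-separated, while `as_slice` gives uniform access.
    let opts = ColumnOptions::SpaceSeparated(vec![ColumnOption::NotNull]);
    assert_eq!(opts.as_slice().len(), 1);
}
```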
@@ -1651,7 +1709,7 @@ pub struct ColumnPolicyProperty {
     /// ```
     /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/sql/create-table
     pub with: bool,
-    pub policy_name: Ident,
+    pub policy_name: ObjectName,
     pub using_columns: Option<Vec<Ident>>,
 }

@@ -1816,7 +1874,7 @@ impl fmt::Display for ColumnOption {
             } => {
                 write!(f, "{}", if *is_primary { "PRIMARY KEY" } else { "UNIQUE" })?;
                 if let Some(characteristics) = characteristics {
-                    write!(f, " {}", characteristics)?;
+                    write!(f, " {characteristics}")?;
                 }
                 Ok(())
             }
@@ -1838,7 +1896,7 @@ impl fmt::Display for ColumnOption {
                     write!(f, " ON UPDATE {action}")?;
                 }
                 if let Some(characteristics) = characteristics {
-                    write!(f, " {}", characteristics)?;
+                    write!(f, " {characteristics}")?;
                 }
                 Ok(())
             }
@@ -1898,7 +1956,7 @@ impl fmt::Display for ColumnOption {
                 write!(f, "{parameters}")
             }
             OnConflict(keyword) => {
-                write!(f, "ON CONFLICT {:?}", keyword)?;
+                write!(f, "ON CONFLICT {keyword:?}")?;
                 Ok(())
             }
             Policy(parameters) => {
@@ -55,7 +55,7 @@ impl Display for IndexColumn {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.column)?;
         if let Some(operator_class) = &self.operator_class {
-            write!(f, " {}", operator_class)?;
+            write!(f, " {operator_class}")?;
         }
         Ok(())
     }
@@ -266,7 +266,7 @@ impl Display for CreateTable {
             name = self.name,
         )?;
         if let Some(on_cluster) = &self.on_cluster {
-            write!(f, " ON CLUSTER {}", on_cluster)?;
+            write!(f, " ON CLUSTER {on_cluster}")?;
         }
         if !self.columns.is_empty() || !self.constraints.is_empty() {
             f.write_str(" (")?;
@@ -383,15 +383,15 @@ impl Display for CreateTable {
         match &self.table_options {
             options @ CreateTableOptions::With(_)
             | options @ CreateTableOptions::Plain(_)
-            | options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?,
+            | options @ CreateTableOptions::TableProperties(_) => write!(f, " {options}")?,
             _ => (),
         }

         if let Some(primary_key) = &self.primary_key {
-            write!(f, " PRIMARY KEY {}", primary_key)?;
+            write!(f, " PRIMARY KEY {primary_key}")?;
         }
         if let Some(order_by) = &self.order_by {
-            write!(f, " ORDER BY {}", order_by)?;
+            write!(f, " ORDER BY {order_by}")?;
         }
         if let Some(inherits) = &self.inherits {
             write!(f, " INHERITS ({})", display_comma_separated(inherits))?;
@@ -403,7 +403,7 @@ impl Display for CreateTable {
             write!(f, " CLUSTER BY {cluster_by}")?;
         }
         if let options @ CreateTableOptions::Options(_) = &self.table_options {
-            write!(f, " {}", options)?;
+            write!(f, " {options}")?;
         }
         if let Some(external_volume) = self.external_volume.as_ref() {
             write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?;
@@ -67,7 +67,7 @@ impl fmt::Display for KeyValueOptions {
             } else {
                 f.write_str(" ")?;
             }
-            write!(f, "{}", option)?;
+            write!(f, "{option}")?;
         }
     }
     Ok(())
src/ast/mod.rs (458 changes)
@@ -28,6 +28,7 @@ use helpers::{
     stmt_data_loading::{FileStagingCommand, StageLoadSelectItemKind},
 };

+use core::cmp::Ordering;
 use core::ops::Deref;
 use core::{
     fmt::{self, Display},
@@ -60,13 +61,14 @@ pub use self::ddl::{
     AlterColumnOperation, AlterConnectorOwner, AlterIndexOperation, AlterPolicyOperation,
     AlterTableAlgorithm, AlterTableLock, AlterTableOperation, AlterType, AlterTypeAddValue,
     AlterTypeAddValuePosition, AlterTypeOperation, AlterTypeRename, AlterTypeRenameValue,
-    ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnPolicy, ColumnPolicyProperty,
-    ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction, Deduplicate,
-    DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode, IdentityParameters,
-    IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder,
-    IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner, Partition,
-    ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint, TagsColumnOption,
-    UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, ViewColumnDef,
+    ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnOptions, ColumnPolicy,
+    ColumnPolicyProperty, ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction,
+    Deduplicate, DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode,
+    IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind,
+    IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner,
+    Partition, ProcedureParam, ReferentialAction, ReplicaIdentity, TableConstraint,
+    TagsColumnOption, UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation,
+    ViewColumnDef,
 };
 pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert};
 pub use self::operator::{BinaryOperator, UnaryOperator};
@@ -172,7 +174,7 @@ fn format_statement_list(f: &mut fmt::Formatter, statements: &[Statement]) -> fm
 }

 /// An identifier, decomposed into its value or character data and the quote style.
-#[derive(Debug, Clone, PartialOrd, Ord)]
+#[derive(Debug, Clone)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub struct Ident {
@@ -214,6 +216,35 @@ impl core::hash::Hash for Ident {

 impl Eq for Ident {}

+impl PartialOrd for Ident {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for Ident {
+    fn cmp(&self, other: &Self) -> Ordering {
+        let Ident {
+            value,
+            quote_style,
+            // exhaustiveness check; we ignore spans in ordering
+            span: _,
+        } = self;
+
+        let Ident {
+            value: other_value,
+            quote_style: other_quote_style,
+            // exhaustiveness check; we ignore spans in ordering
+            span: _,
+        } = other;
+
+        // First compare by value, then by quote_style
+        value
+            .cmp(other_value)
+            .then_with(|| quote_style.cmp(other_quote_style))
+    }
+}
+
 impl Ident {
     /// Create a new identifier with the given value and no quotes and an empty span.
     pub fn new<S>(value: S) -> Self
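Because the hand-written `Ord` now compares `value` and then `quote_style` while ignoring the span, ordering identifiers is purely textual. A minimal sketch:

```rust
use sqlparser::ast::Ident;

fn main() {
    // Sorting relies on the new Ord impl; identifiers that differ only in
    // source location (span) compare equal.
    let mut idents = vec![Ident::new("b"), Ident::new("a")];
    idents.sort();
    assert_eq!(idents[0].value, "a");
}
```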
@@ -326,7 +357,7 @@ impl ObjectNamePart {
 impl fmt::Display for ObjectNamePart {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            ObjectNamePart::Identifier(ident) => write!(f, "{}", ident),
+            ObjectNamePart::Identifier(ident) => write!(f, "{ident}"),
         }
     }
 }
@@ -428,14 +459,22 @@ impl fmt::Display for Interval {
 pub struct StructField {
     pub field_name: Option<Ident>,
     pub field_type: DataType,
+    /// Struct field options.
+    /// See [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#column_name_and_column_schema)
+    pub options: Option<Vec<SqlOption>>,
 }

 impl fmt::Display for StructField {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         if let Some(name) = &self.field_name {
-            write!(f, "{name} {}", self.field_type)
+            write!(f, "{name} {}", self.field_type)?;
         } else {
-            write!(f, "{}", self.field_type)
+            write!(f, "{}", self.field_type)?;
+        }
+        if let Some(options) = &self.options {
+            write!(f, " OPTIONS({})", display_separated(options, ", "))
+        } else {
+            Ok(())
         }
     }
 }
@@ -740,7 +779,7 @@ pub enum Expr {
     /// `[ NOT ] IN (SELECT ...)`
     InSubquery {
         expr: Box<Expr>,
-        subquery: Box<SetExpr>,
+        subquery: Box<Query>,
         negated: bool,
     },
     /// `[ NOT ] IN UNNEST(array_expression)`
@@ -770,7 +809,7 @@ pub enum Expr {
         any: bool,
         expr: Box<Expr>,
         pattern: Box<Expr>,
-        escape_char: Option<String>,
+        escape_char: Option<Value>,
     },
     /// `ILIKE` (case-insensitive `LIKE`)
     ILike {
@@ -780,14 +819,14 @@ pub enum Expr {
         any: bool,
         expr: Box<Expr>,
         pattern: Box<Expr>,
-        escape_char: Option<String>,
+        escape_char: Option<Value>,
     },
     /// SIMILAR TO regex
     SimilarTo {
         negated: bool,
         expr: Box<Expr>,
         pattern: Box<Expr>,
-        escape_char: Option<String>,
+        escape_char: Option<Value>,
     },
     /// MySQL: RLIKE regex or REGEXP regex
     RLike {
@@ -957,7 +996,7 @@ pub enum Expr {
         data_type: DataType,
         /// The value of the constant.
         /// Hint: you can unwrap the string value using `value.into_string()`.
-        value: Value,
+        value: ValueWithSpan,
     },
     /// Scalar function call e.g. `LEFT(foo, 5)`
     Function(Function),
@@ -1085,6 +1124,8 @@ pub enum Expr {
     /// [Databricks](https://docs.databricks.com/en/sql/language-manual/sql-ref-lambda-functions.html)
     /// [DuckDb](https://duckdb.org/docs/sql/functions/lambda.html)
     Lambda(LambdaFunction),
+    /// Checks membership of a value in a JSON array
+    MemberOf(MemberOf),
 }

 impl Expr {
@@ -1171,8 +1212,8 @@ pub enum AccessExpr {
 impl fmt::Display for AccessExpr {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            AccessExpr::Dot(expr) => write!(f, ".{}", expr),
-            AccessExpr::Subscript(subscript) => write!(f, "[{}]", subscript),
+            AccessExpr::Dot(expr) => write!(f, ".{expr}"),
+            AccessExpr::Subscript(subscript) => write!(f, "[{subscript}]"),
         }
     }
 }
@@ -1374,12 +1415,12 @@ impl fmt::Display for Expr {
         match self {
             Expr::Identifier(s) => write!(f, "{s}"),
             Expr::Wildcard(_) => f.write_str("*"),
-            Expr::QualifiedWildcard(prefix, _) => write!(f, "{}.*", prefix),
+            Expr::QualifiedWildcard(prefix, _) => write!(f, "{prefix}.*"),
             Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")),
             Expr::CompoundFieldAccess { root, access_chain } => {
-                write!(f, "{}", root)?;
+                write!(f, "{root}")?;
                 for field in access_chain {
-                    write!(f, "{}", field)?;
+                    write!(f, "{field}")?;
                 }
                 Ok(())
             }
@@ -1447,7 +1488,7 @@ impl fmt::Display for Expr {
             } => match escape_char {
                 Some(ch) => write!(
                     f,
-                    "{} {}LIKE {}{} ESCAPE '{}'",
+                    "{} {}LIKE {}{} ESCAPE {}",
                     expr,
                     if *negated { "NOT " } else { "" },
                     if *any { "ANY " } else { "" },
@@ -1472,7 +1513,7 @@ impl fmt::Display for Expr {
             } => match escape_char {
                 Some(ch) => write!(
                     f,
-                    "{} {}ILIKE {}{} ESCAPE '{}'",
+                    "{} {}ILIKE {}{} ESCAPE {}",
                     expr,
                     if *negated { "NOT " } else { "" },
                     if *any { "ANY" } else { "" },
@@ -1508,7 +1549,7 @@ impl fmt::Display for Expr {
             } => {
                 let not_ = if *negated { "NOT " } else { "" };
                 if form.is_none() {
-                    write!(f, "{} IS {}NORMALIZED", expr, not_)
+                    write!(f, "{expr} IS {not_}NORMALIZED")
                 } else {
                     write!(
                         f,
@@ -1527,7 +1568,7 @@ impl fmt::Display for Expr {
             } => match escape_char {
                 Some(ch) => write!(
                     f,
-                    "{} {}SIMILAR TO {} ESCAPE '{}'",
+                    "{} {}SIMILAR TO {} ESCAPE {}",
                     expr,
                     if *negated { "NOT " } else { "" },
                     pattern,
@@ -1830,7 +1871,7 @@ impl fmt::Display for Expr {
                 }
             }
             Expr::Named { expr, name } => {
-                write!(f, "{} AS {}", expr, name)
+                write!(f, "{expr} AS {name}")
             }
             Expr::Dictionary(fields) => {
                 write!(f, "{{{}}}", display_comma_separated(fields))
@@ -1873,6 +1914,7 @@ impl fmt::Display for Expr {
             }
             Expr::Prior(expr) => write!(f, "PRIOR {expr}"),
             Expr::Lambda(lambda) => write!(f, "{lambda}"),
+            Expr::MemberOf(member_of) => write!(f, "{member_of}"),
         }
     }
 }
@@ -2386,7 +2428,7 @@ impl fmt::Display for ConditionalStatements {
                 }
                 Ok(())
             }
-            ConditionalStatements::BeginEnd(bes) => write!(f, "{}", bes),
+            ConditionalStatements::BeginEnd(bes) => write!(f, "{bes}"),
         }
     }
 }
@@ -2906,9 +2948,7 @@ impl Display for Set {
                 write!(
                     f,
                     "SET {modifier}ROLE {role_name}",
-                    modifier = context_modifier
-                        .map(|m| format!("{}", m))
-                        .unwrap_or_default()
+                    modifier = context_modifier.map(|m| format!("{m}")).unwrap_or_default()
                 )
             }
             Self::SetSessionParam(kind) => write!(f, "SET {kind}"),
@@ -2941,7 +2981,7 @@ impl Display for Set {
                 charset_name,
                 collation_name,
             } => {
-                write!(f, "SET NAMES {}", charset_name)?;
+                write!(f, "SET NAMES {charset_name}")?;

                 if let Some(collation) = collation_name {
                     f.write_str(" COLLATE ")?;
@@ -2964,7 +3004,7 @@ impl Display for Set {
                 write!(
                     f,
                     "SET {}{}{} = {}",
-                    scope.map(|s| format!("{}", s)).unwrap_or_default(),
+                    scope.map(|s| format!("{s}")).unwrap_or_default(),
                     if *hivevar { "HIVEVAR:" } else { "" },
                     variable,
                     display_comma_separated(values)
@@ -2982,6 +3022,36 @@ impl From<Set> for Statement {
     }
 }

+/// A representation of a `WHEN` arm with all the identifiers catched and the statements to execute
+/// for the arm.
+///
+/// Snowflake: <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
+/// BigQuery: <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub struct ExceptionWhen {
+    pub idents: Vec<Ident>,
+    pub statements: Vec<Statement>,
+}
+
+impl Display for ExceptionWhen {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(
+            f,
+            "WHEN {idents} THEN",
+            idents = display_separated(&self.idents, " OR ")
+        )?;
+
+        if !self.statements.is_empty() {
+            write!(f, " ")?;
+            format_statement_list(f, &self.statements)?;
+        }
+
+        Ok(())
+    }
+}
+
 /// A top-level statement (SELECT, INSERT, CREATE, etc.)
 #[allow(clippy::large_enum_variant)]
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
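A small sketch of the new `ExceptionWhen` arm and the `Display` output implied by the impl above; with no statements attached, only the `WHEN ... THEN` head is printed, and multiple identifiers are joined with `OR`:

```rust
use sqlparser::ast::{ExceptionWhen, Ident};

fn main() {
    let arm = ExceptionWhen {
        idents: vec![Ident::new("EXCEPTION_1"), Ident::new("EXCEPTION_2")],
        statements: vec![],
    };
    assert_eq!(arm.to_string(), "WHEN EXCEPTION_1 OR EXCEPTION_2 THEN");
}
```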
@@ -3248,6 +3318,8 @@ pub enum Statement {
         secret_type: Ident,
         options: Vec<SecretOption>,
     },
+    /// A `CREATE SERVER` statement.
+    CreateServer(CreateServerStatement),
     /// ```sql
     /// CREATE POLICY
     /// ```
@@ -3670,17 +3742,20 @@ pub enum Statement {
         /// END;
         /// ```
         statements: Vec<Statement>,
-        /// Statements of an exception clause.
+        /// Exception handling with exception clauses.
         /// Example:
         /// ```sql
-        /// BEGIN
-        /// SELECT 1;
-        /// EXCEPTION WHEN ERROR THEN
-        /// SELECT 2;
+        /// EXCEPTION
+        /// WHEN EXCEPTION_1 THEN
+        /// SELECT 2;
+        /// WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
         /// SELECT 3;
-        /// END;
+        /// WHEN OTHER THEN
+        /// SELECT 4;
+        /// ```
         /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
-        exception_statements: Option<Vec<Statement>>,
+        /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/exception>
+        exception: Option<Vec<ExceptionWhen>>,
         /// TRUE if the statement has an `END` keyword.
         has_end_keyword: bool,
     },
@@ -3873,6 +3948,7 @@ pub enum Statement {
         or_alter: bool,
         name: ObjectName,
         params: Option<Vec<ProcedureParam>>,
+        language: Option<Ident>,
         body: ConditionalStatements,
     },
     /// ```sql
@@ -3920,6 +3996,7 @@ pub enum Statement {
         with_grant_option: bool,
         as_grantor: Option<Ident>,
         granted_by: Option<Ident>,
+        current_grants: Option<CurrentGrantsKind>,
     },
     /// ```sql
     /// DENY privileges ON object TO grantees
@@ -4173,7 +4250,7 @@ pub enum Statement {
     /// ```sql
     /// NOTIFY channel [ , payload ]
     /// ```
-    /// send a notification event together with an optional “payload” string to channel
+    /// send a notification event together with an optional "payload" string to channel
     ///
     /// See Postgres <https://www.postgresql.org/docs/current/sql-notify.html>
     NOTIFY {
@@ -4236,6 +4313,28 @@ pub enum Statement {
     Return(ReturnStatement),
 }

+/// ```sql
+/// {COPY | REVOKE} CURRENT GRANTS
+/// ```
+///
+/// - [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership#optional-parameters)
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum CurrentGrantsKind {
+    CopyCurrentGrants,
+    RevokeCurrentGrants,
+}
+
+impl fmt::Display for CurrentGrantsKind {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            CurrentGrantsKind::CopyCurrentGrants => write!(f, "COPY CURRENT GRANTS"),
+            CurrentGrantsKind::RevokeCurrentGrants => write!(f, "REVOKE CURRENT GRANTS"),
+        }
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
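The new `CurrentGrantsKind` round-trips to its SQL spelling per the `Display` impl above; a minimal sketch:

```rust
use sqlparser::ast::CurrentGrantsKind;

fn main() {
    // GRANT OWNERSHIP ... COPY CURRENT GRANTS / REVOKE CURRENT GRANTS
    assert_eq!(
        CurrentGrantsKind::CopyCurrentGrants.to_string(),
        "COPY CURRENT GRANTS"
    );
}
```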
@ -4332,7 +4431,7 @@ impl fmt::Display for Statement {
|
||||||
write!(f, "{describe_alias} ")?;
|
write!(f, "{describe_alias} ")?;
|
||||||
|
|
||||||
if let Some(format) = hive_format {
|
if let Some(format) = hive_format {
|
||||||
write!(f, "{} ", format)?;
|
write!(f, "{format} ")?;
|
||||||
}
|
}
|
||||||
if *has_table_keyword {
|
if *has_table_keyword {
|
||||||
write!(f, "TABLE ")?;
|
write!(f, "TABLE ")?;
|
||||||
|
@ -4776,6 +4875,7 @@ impl fmt::Display for Statement {
|
||||||
name,
|
name,
|
||||||
or_alter,
|
or_alter,
|
||||||
params,
|
params,
|
||||||
|
language,
|
||||||
body,
|
body,
|
||||||
} => {
|
} => {
|
||||||
write!(
|
write!(
|
||||||
|
@ -4791,6 +4891,10 @@ impl fmt::Display for Statement {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(language) = language {
|
||||||
|
write!(f, " LANGUAGE {language}")?;
|
||||||
|
}
|
||||||
|
|
||||||
write!(f, " AS {body}")
|
write!(f, " AS {body}")
|
||||||
}
|
}
|
||||||
Statement::CreateMacro {
|
Statement::CreateMacro {
|
||||||
|
@ -5099,6 +5203,9 @@ impl fmt::Display for Statement {
|
||||||
write!(f, " )")?;
|
write!(f, " )")?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
Statement::CreateServer(stmt) => {
|
||||||
|
write!(f, "{stmt}")
|
||||||
|
}
|
||||||
Statement::CreatePolicy {
|
Statement::CreatePolicy {
|
||||||
name,
|
name,
|
||||||
table_name,
|
table_name,
|
||||||
|
@ -5163,7 +5270,7 @@ impl fmt::Display for Statement {
|
||||||
if *only {
|
if *only {
|
||||||
write!(f, "ONLY ")?;
|
write!(f, "ONLY ")?;
|
||||||
}
|
}
|
||||||
write!(f, "{name} ", name = name)?;
|
write!(f, "{name} ")?;
|
||||||
if let Some(cluster) = on_cluster {
|
if let Some(cluster) = on_cluster {
|
||||||
write!(f, "ON CLUSTER {cluster} ")?;
|
write!(f, "ON CLUSTER {cluster} ")?;
|
||||||
}
|
}
|
||||||
|
@ -5241,7 +5348,7 @@ impl fmt::Display for Statement {
|
||||||
)?;
|
)?;
|
||||||
if !session_params.options.is_empty() {
|
if !session_params.options.is_empty() {
|
||||||
if *set {
|
if *set {
|
||||||
write!(f, " {}", session_params)?;
|
write!(f, " {session_params}")?;
|
||||||
} else {
|
} else {
|
||||||
let options = session_params
|
let options = session_params
|
||||||
.options
|
.options
|
||||||
|
@ -5275,7 +5382,7 @@ impl fmt::Display for Statement {
|
||||||
if *purge { " PURGE" } else { "" },
|
if *purge { " PURGE" } else { "" },
|
||||||
)?;
|
)?;
|
||||||
if let Some(table_name) = table.as_ref() {
|
if let Some(table_name) = table.as_ref() {
|
||||||
write!(f, " ON {}", table_name)?;
|
write!(f, " ON {table_name}")?;
|
||||||
};
|
};
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -5525,12 +5632,12 @@ impl fmt::Display for Statement {
|
||||||
transaction,
|
transaction,
|
||||||
modifier,
|
modifier,
|
||||||
statements,
|
statements,
|
||||||
exception_statements,
|
exception,
|
||||||
has_end_keyword,
|
has_end_keyword,
|
||||||
} => {
|
} => {
|
||||||
if *syntax_begin {
|
if *syntax_begin {
|
||||||
if let Some(modifier) = *modifier {
|
if let Some(modifier) = *modifier {
|
||||||
write!(f, "BEGIN {}", modifier)?;
|
write!(f, "BEGIN {modifier}")?;
|
||||||
} else {
|
} else {
|
||||||
write!(f, "BEGIN")?;
|
write!(f, "BEGIN")?;
|
||||||
}
|
}
|
||||||
|
@ -5547,11 +5654,10 @@ impl fmt::Display for Statement {
|
||||||
write!(f, " ")?;
|
write!(f, " ")?;
|
||||||
format_statement_list(f, statements)?;
|
format_statement_list(f, statements)?;
|
||||||
}
|
}
|
||||||
if let Some(exception_statements) = exception_statements {
|
if let Some(exception_when) = exception {
|
||||||
write!(f, " EXCEPTION WHEN ERROR THEN")?;
|
write!(f, " EXCEPTION")?;
|
||||||
if !exception_statements.is_empty() {
|
for when in exception_when {
|
||||||
write!(f, " ")?;
|
write!(f, " {when}")?;
|
||||||
format_statement_list(f, exception_statements)?;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if *has_end_keyword {
|
if *has_end_keyword {
|
||||||
|
@ -5567,7 +5673,7 @@ impl fmt::Display for Statement {
|
||||||
if *end_syntax {
|
if *end_syntax {
|
||||||
write!(f, "END")?;
|
write!(f, "END")?;
|
||||||
if let Some(modifier) = *modifier {
|
if let Some(modifier) = *modifier {
|
||||||
write!(f, " {}", modifier)?;
|
write!(f, " {modifier}")?;
|
||||||
}
|
}
|
||||||
if *chain {
|
if *chain {
|
||||||
write!(f, " AND CHAIN")?;
|
write!(f, " AND CHAIN")?;
|
||||||
|
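As a hedged illustration of the reworked `BEGIN ... EXCEPTION ... END` representation above: the sketch below uses the crate's existing `Parser::parse_sql` entry point and `BigQueryDialect`; whether this exact SQL is accepted depends on a build that contains the change, so treat it as an assumption-laden example rather than a test from this diff.

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // BigQuery-style exception handling block; on re-serialization the
    // `exception` field shown above carries the WHEN ... THEN arms.
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql)?;
    println!("{}", statements[0]); // Display goes through the new formatting path
    Ok(())
}
```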
@@ -5632,6 +5738,7 @@ impl fmt::Display for Statement {
                 with_grant_option,
                 as_grantor,
                 granted_by,
+                current_grants,
             } => {
                 write!(f, "GRANT {privileges} ")?;
                 if let Some(objects) = objects {
@@ -5641,6 +5748,9 @@ impl fmt::Display for Statement {
                 if *with_grant_option {
                     write!(f, " WITH GRANT OPTION")?;
                 }
+                if let Some(current_grants) = current_grants {
+                    write!(f, " {current_grants}")?;
+                }
                 if let Some(grantor) = as_grantor {
                     write!(f, " AS {grantor}")?;
                 }
@@ -5666,7 +5776,7 @@ impl fmt::Display for Statement {
                     write!(f, " GRANTED BY {grantor}")?;
                 }
                 if let Some(cascade) = cascade {
-                    write!(f, " {}", cascade)?;
+                    write!(f, " {cascade}")?;
                 }
                 Ok(())
             }
@@ -5845,13 +5955,13 @@ impl fmt::Display for Statement {
                     if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" },
                 )?;
                 if !directory_table_params.options.is_empty() {
-                    write!(f, " DIRECTORY=({})", directory_table_params)?;
+                    write!(f, " DIRECTORY=({directory_table_params})")?;
                 }
                 if !file_format.options.is_empty() {
-                    write!(f, " FILE_FORMAT=({})", file_format)?;
+                    write!(f, " FILE_FORMAT=({file_format})")?;
                 }
                 if !copy_options.options.is_empty() {
-                    write!(f, " COPY_OPTIONS=({})", copy_options)?;
+                    write!(f, " COPY_OPTIONS=({copy_options})")?;
                 }
                 if comment.is_some() {
                     write!(f, " COMMENT='{}'", comment.as_ref().unwrap())?;
@@ -5874,7 +5984,7 @@ impl fmt::Display for Statement {
                 validation_mode,
                 partition,
             } => {
-                write!(f, "COPY INTO {}", into)?;
+                write!(f, "COPY INTO {into}")?;
                 if let Some(into_columns) = into_columns {
                     write!(f, " ({})", display_comma_separated(into_columns))?;
                 }
@@ -5890,12 +6000,12 @@ impl fmt::Display for Statement {
                     )?;
                 }
                 if let Some(from_obj_alias) = from_obj_alias {
-                    write!(f, " AS {}", from_obj_alias)?;
+                    write!(f, " AS {from_obj_alias}")?;
                 }
                 write!(f, ")")?;
             } else if let Some(from_obj) = from_obj {
                 // Standard data load
-                write!(f, " FROM {}{}", from_obj, stage_params)?;
+                write!(f, " FROM {from_obj}{stage_params}")?;
                 if let Some(from_obj_alias) = from_obj_alias {
                     write!(f, " AS {from_obj_alias}")?;
                 }
@@ -5908,24 +6018,24 @@ impl fmt::Display for Statement {
                 write!(f, " FILES = ('{}')", display_separated(files, "', '"))?;
             }
             if let Some(pattern) = pattern {
-                write!(f, " PATTERN = '{}'", pattern)?;
+                write!(f, " PATTERN = '{pattern}'")?;
             }
             if let Some(partition) = partition {
                 write!(f, " PARTITION BY {partition}")?;
             }
             if !file_format.options.is_empty() {
-                write!(f, " FILE_FORMAT=({})", file_format)?;
+                write!(f, " FILE_FORMAT=({file_format})")?;
             }
             if !copy_options.options.is_empty() {
                 match kind {
                     CopyIntoSnowflakeKind::Table => {
-                        write!(f, " COPY_OPTIONS=({})", copy_options)?
+                        write!(f, " COPY_OPTIONS=({copy_options})")?
                     }
                     CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?,
                 }
             }
             if let Some(validation_mode) = validation_mode {
-                write!(f, " VALIDATION_MODE = {}", validation_mode)?;
+                write!(f, " VALIDATION_MODE = {validation_mode}")?;
             }
             Ok(())
         }
@@ -5971,10 +6081,10 @@ impl fmt::Display for Statement {
             } => {
                 write!(f, "OPTIMIZE TABLE {name}")?;
                 if let Some(on_cluster) = on_cluster {
-                    write!(f, " ON CLUSTER {on_cluster}", on_cluster = on_cluster)?;
+                    write!(f, " ON CLUSTER {on_cluster}")?;
                 }
                 if let Some(partition) = partition {
-                    write!(f, " {partition}", partition = partition)?;
+                    write!(f, " {partition}")?;
                 }
                 if *include_final {
                     write!(f, " FINAL")?;
@@ -6101,7 +6211,7 @@ impl fmt::Display for SetAssignment {
         write!(
             f,
             "{}{} = {}",
-            self.scope.map(|s| format!("{}", s)).unwrap_or_default(),
+            self.scope.map(|s| format!("{s}")).unwrap_or_default(),
             self.name,
             self.value
         )
@@ -6830,7 +6940,7 @@ impl fmt::Display for GranteeName {
         match self {
             GranteeName::ObjectName(name) => name.fmt(f),
             GranteeName::UserHost { user, host } => {
-                write!(f, "{}@{}", user, host)
+                write!(f, "{user}@{host}")
             }
         }
     }
@@ -6845,6 +6955,24 @@ pub enum GrantObjects {
     AllSequencesInSchema { schemas: Vec<ObjectName> },
     /// Grant privileges on `ALL TABLES IN SCHEMA <schema_name> [, ...]`
     AllTablesInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `ALL VIEWS IN SCHEMA <schema_name> [, ...]`
+    AllViewsInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `ALL MATERIALIZED VIEWS IN SCHEMA <schema_name> [, ...]`
+    AllMaterializedViewsInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `ALL EXTERNAL TABLES IN SCHEMA <schema_name> [, ...]`
+    AllExternalTablesInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE SCHEMAS IN DATABASE <database_name> [, ...]`
+    FutureSchemasInDatabase { databases: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE TABLES IN SCHEMA <schema_name> [, ...]`
+    FutureTablesInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE VIEWS IN SCHEMA <schema_name> [, ...]`
+    FutureViewsInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE EXTERNAL TABLES IN SCHEMA <schema_name> [, ...]`
+    FutureExternalTablesInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE MATERIALIZED VIEWS IN SCHEMA <schema_name> [, ...]`
+    FutureMaterializedViewsInSchema { schemas: Vec<ObjectName> },
+    /// Grant privileges on `FUTURE SEQUENCES IN SCHEMA <schema_name> [, ...]`
+    FutureSequencesInSchema { schemas: Vec<ObjectName> },
     /// Grant privileges on specific databases
     Databases(Vec<ObjectName>),
     /// Grant privileges on specific schemas
@@ -6913,6 +7041,69 @@ impl fmt::Display for GrantObjects {
                     display_comma_separated(schemas)
                 )
             }
+            GrantObjects::AllExternalTablesInSchema { schemas } => {
+                write!(
+                    f,
+                    "ALL EXTERNAL TABLES IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::AllViewsInSchema { schemas } => {
+                write!(
+                    f,
+                    "ALL VIEWS IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::AllMaterializedViewsInSchema { schemas } => {
+                write!(
+                    f,
+                    "ALL MATERIALIZED VIEWS IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::FutureSchemasInDatabase { databases } => {
+                write!(
+                    f,
+                    "FUTURE SCHEMAS IN DATABASE {}",
+                    display_comma_separated(databases)
+                )
+            }
+            GrantObjects::FutureTablesInSchema { schemas } => {
+                write!(
+                    f,
+                    "FUTURE TABLES IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::FutureExternalTablesInSchema { schemas } => {
+                write!(
+                    f,
+                    "FUTURE EXTERNAL TABLES IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::FutureViewsInSchema { schemas } => {
+                write!(
+                    f,
+                    "FUTURE VIEWS IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::FutureMaterializedViewsInSchema { schemas } => {
+                write!(
+                    f,
+                    "FUTURE MATERIALIZED VIEWS IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
+            GrantObjects::FutureSequencesInSchema { schemas } => {
+                write!(
+                    f,
+                    "FUTURE SEQUENCES IN SCHEMA {}",
+                    display_comma_separated(schemas)
+                )
+            }
             GrantObjects::ResourceMonitors(objects) => {
                 write!(f, "RESOURCE MONITOR {}", display_comma_separated(objects))
             }
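A hedged usage sketch for the new grant targets above: `Parser::parse_sql` and `SnowflakeDialect` are existing sqlparser APIs, and the statement text follows Snowflake's FUTURE-grants syntax, but acceptance of this exact SQL is assumed for a build containing this change.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Exercises the FutureTablesInSchema variant added above.
    let sql = "GRANT SELECT ON FUTURE TABLES IN SCHEMA db1.s1 TO ROLE r1";
    let statements = Parser::parse_sql(&SnowflakeDialect {}, sql)?;
    println!("{}", statements[0]);
    Ok(())
}
```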
@@ -7000,7 +7191,7 @@ pub enum AssignmentTarget {
 impl fmt::Display for AssignmentTarget {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            AssignmentTarget::ColumnName(column) => write!(f, "{}", column),
+            AssignmentTarget::ColumnName(column) => write!(f, "{column}"),
             AssignmentTarget::Tuple(columns) => write!(f, "({})", display_comma_separated(columns)),
         }
     }
@@ -7245,8 +7436,8 @@ impl fmt::Display for FunctionArguments {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             FunctionArguments::None => Ok(()),
-            FunctionArguments::Subquery(query) => write!(f, "({})", query),
-            FunctionArguments::List(args) => write!(f, "({})", args),
+            FunctionArguments::Subquery(query) => write!(f, "({query})"),
+            FunctionArguments::List(args) => write!(f, "({args})"),
         }
     }
 }
@@ -7267,7 +7458,7 @@ pub struct FunctionArgumentList {
 impl fmt::Display for FunctionArgumentList {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         if let Some(duplicate_treatment) = self.duplicate_treatment {
-            write!(f, "{} ", duplicate_treatment)?;
+            write!(f, "{duplicate_treatment} ")?;
         }
         write!(f, "{}", display_comma_separated(&self.args))?;
         if !self.clauses.is_empty() {
@@ -7327,7 +7518,7 @@ impl fmt::Display for FunctionArgumentClause {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment) => {
-                write!(f, "{}", null_treatment)
+                write!(f, "{null_treatment}")
             }
             FunctionArgumentClause::OrderBy(order_by) => {
                 write!(f, "ORDER BY {}", display_comma_separated(order_by))
@@ -7783,12 +7974,12 @@ pub enum SqlOption {
 impl fmt::Display for SqlOption {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            SqlOption::Clustered(c) => write!(f, "{}", c),
+            SqlOption::Clustered(c) => write!(f, "{c}"),
             SqlOption::Ident(ident) => {
-                write!(f, "{}", ident)
+                write!(f, "{ident}")
             }
             SqlOption::KeyValue { key: name, value } => {
-                write!(f, "{} = {}", name, value)
+                write!(f, "{name} = {value}")
             }
             SqlOption::Partition {
                 column_name,
@@ -7828,7 +8019,7 @@ impl fmt::Display for SqlOption {
             SqlOption::NamedParenthesizedList(value) => {
                 write!(f, "{} = ", value.key)?;
                 if let Some(key) = &value.name {
-                    write!(f, "{}", key)?;
+                    write!(f, "{key}")?;
                 }
                 if !value.values.is_empty() {
                     write!(f, "({})", display_comma_separated(&value.values))?
@@ -7871,6 +8062,70 @@ impl fmt::Display for SecretOption {
     }
 }

+/// A `CREATE SERVER` statement.
+///
+/// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createserver.html)
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub struct CreateServerStatement {
+    pub name: ObjectName,
+    pub if_not_exists: bool,
+    pub server_type: Option<Ident>,
+    pub version: Option<Ident>,
+    pub foreign_data_wrapper: ObjectName,
+    pub options: Option<Vec<CreateServerOption>>,
+}
+
+impl fmt::Display for CreateServerStatement {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let CreateServerStatement {
+            name,
+            if_not_exists,
+            server_type,
+            version,
+            foreign_data_wrapper,
+            options,
+        } = self;
+
+        write!(
+            f,
+            "CREATE SERVER {if_not_exists}{name} ",
+            if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" },
+        )?;
+
+        if let Some(st) = server_type {
+            write!(f, "TYPE {st} ")?;
+        }
+
+        if let Some(v) = version {
+            write!(f, "VERSION {v} ")?;
+        }
+
+        write!(f, "FOREIGN DATA WRAPPER {foreign_data_wrapper}")?;
+
+        if let Some(o) = options {
+            write!(f, " OPTIONS ({o})", o = display_comma_separated(o))?;
+        }
+
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub struct CreateServerOption {
+    pub key: Ident,
+    pub value: Ident,
+}
+
+impl fmt::Display for CreateServerOption {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{} {}", self.key, self.value)
+    }
+}
+
 #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
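A hedged sketch of the `CREATE SERVER` form modelled by `CreateServerStatement` above; the FDW name and option values are placeholders, not part of the diff, and acceptance of this exact text is assumed for a parser build carrying the change.

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // CREATE SERVER with optional OPTIONS list, as described by the struct above.
    let sql = "CREATE SERVER IF NOT EXISTS myserver \
               FOREIGN DATA WRAPPER postgres_fdw \
               OPTIONS (host 'foo', dbname 'foodb', port '5432')";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{}", statements[0]);
    Ok(())
}
```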
@@ -7885,7 +8140,7 @@ impl fmt::Display for AttachDuckDBDatabaseOption {
             AttachDuckDBDatabaseOption::ReadOnly(Some(true)) => write!(f, "READ_ONLY true"),
             AttachDuckDBDatabaseOption::ReadOnly(Some(false)) => write!(f, "READ_ONLY false"),
             AttachDuckDBDatabaseOption::ReadOnly(None) => write!(f, "READ_ONLY"),
-            AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {}", t),
+            AttachDuckDBDatabaseOption::Type(t) => write!(f, "TYPE {t}"),
         }
     }
 }
@@ -9198,12 +9453,12 @@ impl Display for RowAccessPolicy {
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub struct Tag {
-    pub key: Ident,
+    pub key: ObjectName,
     pub value: String,
 }

 impl Tag {
-    pub fn new(key: Ident, value: String) -> Self {
+    pub fn new(key: ObjectName, value: String) -> Self {
         Self { key, value }
     }
 }
@@ -9408,10 +9663,10 @@ impl fmt::Display for ShowStatementIn {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.clause)?;
         if let Some(parent_type) = &self.parent_type {
-            write!(f, " {}", parent_type)?;
+            write!(f, " {parent_type}")?;
         }
         if let Some(parent_name) = &self.parent_name {
-            write!(f, " {}", parent_name)?;
+            write!(f, " {parent_name}")?;
         }
         Ok(())
     }
@@ -9492,7 +9747,7 @@ impl fmt::Display for TableObject {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
             Self::TableName(table_name) => write!(f, "{table_name}"),
-            Self::TableFunction(func) => write!(f, "FUNCTION {}", func),
+            Self::TableFunction(func) => write!(f, "FUNCTION {func}"),
         }
     }
 }
@@ -9680,7 +9935,7 @@ pub struct ReturnStatement {
 impl fmt::Display for ReturnStatement {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match &self.value {
-            Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {}", expr),
+            Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {expr}"),
             None => write!(f, "RETURN"),
         }
     }
@@ -9729,8 +9984,31 @@ impl fmt::Display for NullInclusion {
     }
 }

+/// Checks membership of a value in a JSON array
+///
+/// Syntax:
+/// ```sql
+/// <value> MEMBER OF(<array>)
+/// ```
+/// [MySQL](https://dev.mysql.com/doc/refman/8.4/en/json-search-functions.html#operator_member-of)
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub struct MemberOf {
+    pub value: Box<Expr>,
+    pub array: Box<Expr>,
+}
+
+impl fmt::Display for MemberOf {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{} MEMBER OF({})", self.value, self.array)
+    }
+}
+
 #[cfg(test)]
 mod tests {
+    use crate::tokenizer::Location;
+
     use super::*;

     #[test]
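A short, hedged sketch of the MySQL `MEMBER OF` operator that the new `MemberOf` node above represents; the APIs used (`Parser::parse_sql`, `MySqlDialect`) exist in this crate, while support for this exact expression is assumed to come from the change in this diff.

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Membership test against a JSON array literal, per the doc comment above.
    let sql = "SELECT 17 MEMBER OF('[23, \"abc\", 17, \"ab\", 10]')";
    let statements = Parser::parse_sql(&MySqlDialect {}, sql)?;
    println!("{}", statements[0]); // re-serialized through MemberOf's Display impl
    Ok(())
}
```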
@@ -10026,4 +10304,16 @@ mod tests {
         test_steps(OneOrManyWithParens::Many(vec![2]), vec![2], 3);
         test_steps(OneOrManyWithParens::Many(vec![3, 4]), vec![3, 4], 4);
     }
+
+    // Tests that the position in the code of an `Ident` does not affect its
+    // ordering.
+    #[test]
+    fn test_ident_ord() {
+        let mut a = Ident::with_span(Span::new(Location::new(1, 1), Location::new(1, 1)), "a");
+        let mut b = Ident::with_span(Span::new(Location::new(2, 2), Location::new(2, 2)), "b");
+
+        assert!(a < b);
+        std::mem::swap(&mut a.span, &mut b.span);
+        assert!(a < b);
+    }
 }
231
src/ast/query.rs
@@ -1047,7 +1047,7 @@ impl fmt::Display for ConnectBy {
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub struct Setting {
     pub key: Ident,
-    pub value: Value,
+    pub value: Expr,
 }

 impl fmt::Display for Setting {
@@ -1183,7 +1183,7 @@ impl fmt::Display for TableIndexHints {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{} {} ", self.hint_type, self.index_type)?;
         if let Some(for_clause) = &self.for_clause {
-            write!(f, "FOR {} ", for_clause)?;
+            write!(f, "FOR {for_clause} ")?;
         }
         write!(f, "({})", display_comma_separated(&self.index_names))
     }
@@ -1459,7 +1459,7 @@ impl fmt::Display for TableSampleQuantity {
         }
         write!(f, "{}", self.value)?;
         if let Some(unit) = &self.unit {
-            write!(f, " {}", unit)?;
+            write!(f, " {unit}")?;
         }
         if self.parenthesized {
             write!(f, ")")?;
@@ -1552,7 +1552,7 @@ impl fmt::Display for TableSampleBucket {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?;
         if let Some(on) = &self.on {
-            write!(f, " ON {}", on)?;
+            write!(f, " ON {on}")?;
         }
         Ok(())
     }
@@ -1561,19 +1561,19 @@ impl fmt::Display for TableSample {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.modifier)?;
         if let Some(name) = &self.name {
-            write!(f, " {}", name)?;
+            write!(f, " {name}")?;
         }
         if let Some(quantity) = &self.quantity {
-            write!(f, " {}", quantity)?;
+            write!(f, " {quantity}")?;
         }
         if let Some(seed) = &self.seed {
-            write!(f, " {}", seed)?;
+            write!(f, " {seed}")?;
         }
         if let Some(bucket) = &self.bucket {
-            write!(f, " ({})", bucket)?;
+            write!(f, " ({bucket})")?;
         }
         if let Some(offset) = &self.offset {
-            write!(f, " OFFSET {}", offset)?;
+            write!(f, " OFFSET {offset}")?;
         }
         Ok(())
     }
@@ -1651,7 +1651,7 @@ impl fmt::Display for RowsPerMatch {
             RowsPerMatch::AllRows(mode) => {
                 write!(f, "ALL ROWS PER MATCH")?;
                 if let Some(mode) = mode {
-                    write!(f, " {}", mode)?;
+                    write!(f, " {mode}")?;
                 }
                 Ok(())
             }
@@ -1777,7 +1777,7 @@ impl fmt::Display for MatchRecognizePattern {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         use MatchRecognizePattern::*;
         match self {
-            Symbol(symbol) => write!(f, "{}", symbol),
+            Symbol(symbol) => write!(f, "{symbol}"),
             Exclude(symbol) => write!(f, "{{- {symbol} -}}"),
             Permute(symbols) => write!(f, "PERMUTE({})", display_comma_separated(symbols)),
             Concat(patterns) => write!(f, "{}", display_separated(patterns, " ")),
@@ -2148,7 +2148,7 @@ impl fmt::Display for TableAliasColumnDef {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.name)?;
         if let Some(ref data_type) = self.data_type {
-            write!(f, " {}", data_type)?;
+            write!(f, " {data_type}")?;
         }
         Ok(())
     }
@@ -2398,7 +2398,7 @@ impl fmt::Display for OrderBy {
                 write!(f, " {}", display_comma_separated(exprs))?;
             }
             OrderByKind::All(all) => {
-                write!(f, " ALL{}", all)?;
+                write!(f, " ALL{all}")?;
             }
         }

@@ -2429,7 +2429,7 @@ impl fmt::Display for OrderByExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}{}", self.expr, self.options)?;
         if let Some(ref with_fill) = self.with_fill {
-            write!(f, " {}", with_fill)?
+            write!(f, " {with_fill}")?
         }
         Ok(())
     }
@@ -2452,13 +2452,13 @@ impl fmt::Display for WithFill {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "WITH FILL")?;
         if let Some(ref from) = self.from {
-            write!(f, " FROM {}", from)?;
+            write!(f, " FROM {from}")?;
         }
         if let Some(ref to) = self.to {
-            write!(f, " TO {}", to)?;
+            write!(f, " TO {to}")?;
         }
         if let Some(ref step) = self.step {
-            write!(f, " STEP {}", step)?;
+            write!(f, " STEP {step}")?;
         }
         Ok(())
     }
@@ -2487,7 +2487,7 @@ impl fmt::Display for InterpolateExpr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "{}", self.column)?;
         if let Some(ref expr) = self.expr {
-            write!(f, " AS {}", expr)?;
+            write!(f, " AS {expr}")?;
         }
         Ok(())
     }
@@ -2565,7 +2565,7 @@ impl fmt::Display for LimitClause {
                 Ok(())
             }
             LimitClause::OffsetCommaLimit { offset, limit } => {
-                write!(f, " LIMIT {}, {}", offset, limit)
+                write!(f, " LIMIT {offset}, {limit}")
             }
         }
     }
@@ -2684,6 +2684,79 @@ pub enum PipeOperator {
     /// Syntax: `|> TABLESAMPLE SYSTEM (10 PERCENT)
     /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#tablesample_pipe_operator>
     TableSample { sample: Box<TableSample> },
+    /// Renames columns in the input table.
+    ///
+    /// Syntax: `|> RENAME old_name AS new_name, ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#rename_pipe_operator>
+    Rename { mappings: Vec<IdentWithAlias> },
+    /// Combines the input table with one or more tables using UNION.
+    ///
+    /// Syntax: `|> UNION [ALL|DISTINCT] (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#union_pipe_operator>
+    Union {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Returns only the rows that are present in both the input table and the specified tables.
+    ///
+    /// Syntax: `|> INTERSECT [DISTINCT] (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#intersect_pipe_operator>
+    Intersect {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Returns only the rows that are present in the input table but not in the specified tables.
+    ///
+    /// Syntax: `|> EXCEPT DISTINCT (<query>), (<query>), ...`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#except_pipe_operator>
+    Except {
+        set_quantifier: SetQuantifier,
+        queries: Vec<Query>,
+    },
+    /// Calls a table function or procedure that returns a table.
+    ///
+    /// Syntax: `|> CALL function_name(args) [AS alias]`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#call_pipe_operator>
+    Call {
+        function: Function,
+        alias: Option<Ident>,
+    },
+    /// Pivots data from rows to columns.
+    ///
+    /// Syntax: `|> PIVOT(aggregate_function(column) FOR pivot_column IN (value1, value2, ...)) [AS alias]`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#pivot_pipe_operator>
+    Pivot {
+        aggregate_functions: Vec<ExprWithAlias>,
+        value_column: Vec<Ident>,
+        value_source: PivotValueSource,
+        alias: Option<Ident>,
+    },
+    /// The `UNPIVOT` pipe operator transforms columns into rows.
+    ///
+    /// Syntax:
+    /// ```sql
+    /// |> UNPIVOT(value_column FOR name_column IN (column1, column2, ...)) [alias]
+    /// ```
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#unpivot_pipe_operator>
+    Unpivot {
+        value_column: Ident,
+        name_column: Ident,
+        unpivot_columns: Vec<Ident>,
+        alias: Option<Ident>,
+    },
+    /// Joins the input table with another table.
+    ///
+    /// Syntax: `|> [JOIN_TYPE] JOIN <table> [alias] ON <condition>` or `|> [JOIN_TYPE] JOIN <table> [alias] USING (<columns>)`
+    ///
+    /// See more at <https://cloud.google.com/bigquery/docs/reference/standard-sql/pipe-syntax#join_pipe_operator>
+    Join(Join),
 }

 impl fmt::Display for PipeOperator {
@@ -2702,12 +2775,12 @@ impl fmt::Display for PipeOperator {
                 write!(f, "DROP {}", display_comma_separated(columns.as_slice()))
             }
             PipeOperator::As { alias } => {
-                write!(f, "AS {}", alias)
+                write!(f, "AS {alias}")
             }
             PipeOperator::Limit { expr, offset } => {
-                write!(f, "LIMIT {}", expr)?;
+                write!(f, "LIMIT {expr}")?;
                 if let Some(offset) = offset {
-                    write!(f, " OFFSET {}", offset)?;
+                    write!(f, " OFFSET {offset}")?;
                 }
                 Ok(())
             }
@@ -2730,16 +2803,96 @@ impl fmt::Display for PipeOperator {
             }

             PipeOperator::Where { expr } => {
-                write!(f, "WHERE {}", expr)
+                write!(f, "WHERE {expr}")
             }
             PipeOperator::OrderBy { exprs } => {
                 write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice()))
             }

             PipeOperator::TableSample { sample } => {
-                write!(f, "{}", sample)
+                write!(f, "{sample}")
+            }
+            PipeOperator::Rename { mappings } => {
+                write!(f, "RENAME {}", display_comma_separated(mappings))
+            }
+            PipeOperator::Union {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "UNION", set_quantifier, queries),
+            PipeOperator::Intersect {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "INTERSECT", set_quantifier, queries),
+            PipeOperator::Except {
+                set_quantifier,
+                queries,
+            } => Self::fmt_set_operation(f, "EXCEPT", set_quantifier, queries),
+            PipeOperator::Call { function, alias } => {
+                write!(f, "CALL {function}")?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Pivot {
+                aggregate_functions,
+                value_column,
+                value_source,
+                alias,
+            } => {
+                write!(
+                    f,
+                    "PIVOT({} FOR {} IN ({}))",
+                    display_comma_separated(aggregate_functions),
+                    Expr::CompoundIdentifier(value_column.to_vec()),
+                    value_source
+                )?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Unpivot {
+                value_column,
+                name_column,
+                unpivot_columns,
+                alias,
+            } => {
+                write!(
+                    f,
+                    "UNPIVOT({} FOR {} IN ({}))",
+                    value_column,
+                    name_column,
+                    display_comma_separated(unpivot_columns)
+                )?;
+                Self::fmt_optional_alias(f, alias)
+            }
+            PipeOperator::Join(join) => write!(f, "{join}"),
+        }
+    }
+}
+
+impl PipeOperator {
+    /// Helper function to format optional alias for pipe operators
+    fn fmt_optional_alias(f: &mut fmt::Formatter<'_>, alias: &Option<Ident>) -> fmt::Result {
+        if let Some(alias) = alias {
+            write!(f, " AS {alias}")?;
+        }
+        Ok(())
+    }
+
+    /// Helper function to format set operations (UNION, INTERSECT, EXCEPT) with queries
+    fn fmt_set_operation(
+        f: &mut fmt::Formatter<'_>,
+        operation: &str,
+        set_quantifier: &SetQuantifier,
+        queries: &[Query],
+    ) -> fmt::Result {
+        write!(f, "{operation}")?;
+        match set_quantifier {
+            SetQuantifier::None => {}
+            _ => {
+                write!(f, " {set_quantifier}")?;
             }
         }
+        write!(f, " ")?;
+        let parenthesized_queries: Vec<String> =
+            queries.iter().map(|query| format!("({query})")).collect();
+        write!(f, "{}", display_comma_separated(&parenthesized_queries))
     }
 }
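A hedged sketch of the pipe syntax that the new `PipeOperator` variants above model; BigQuery is assumed here because the doc comments reference its pipe-syntax documentation, and acceptance of this exact query is an assumption about a build containing this change.

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // Chains an existing WHERE pipe stage with the new RENAME stage.
    let sql = "SELECT * FROM orders |> WHERE amount > 100 |> RENAME amount AS total";
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql)?;
    println!("{}", statements[0]);
    Ok(())
}
```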
@@ -3016,7 +3169,7 @@ pub enum FormatClause {
 impl fmt::Display for FormatClause {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            FormatClause::Identifier(ident) => write!(f, "FORMAT {}", ident),
+            FormatClause::Identifier(ident) => write!(f, "FORMAT {ident}"),
             FormatClause::Null => write!(f, "FORMAT NULL"),
         }
     }
@@ -3078,9 +3231,9 @@ impl fmt::Display for ForClause {
                 without_array_wrapper,
             } => {
                 write!(f, "FOR JSON ")?;
-                write!(f, "{}", for_json)?;
+                write!(f, "{for_json}")?;
                 if let Some(root) = root {
-                    write!(f, ", ROOT('{}')", root)?;
+                    write!(f, ", ROOT('{root}')")?;
                 }
                 if *include_null_values {
                     write!(f, ", INCLUDE_NULL_VALUES")?;
@@ -3098,7 +3251,7 @@ impl fmt::Display for ForClause {
                 r#type,
             } => {
                 write!(f, "FOR XML ")?;
-                write!(f, "{}", for_xml)?;
+                write!(f, "{for_xml}")?;
                 if *binary_base64 {
                     write!(f, ", BINARY BASE64")?;
                 }
@@ -3106,7 +3259,7 @@ impl fmt::Display for ForClause {
                     write!(f, ", TYPE")?;
                 }
                 if let Some(root) = root {
-                    write!(f, ", ROOT('{}')", root)?;
+                    write!(f, ", ROOT('{root}')")?;
                 }
                 if *elements {
                     write!(f, ", ELEMENTS")?;
@@ -3133,7 +3286,7 @@ impl fmt::Display for ForXml {
             ForXml::Raw(root) => {
                 write!(f, "RAW")?;
                 if let Some(root) = root {
-                    write!(f, "('{}')", root)?;
+                    write!(f, "('{root}')")?;
                 }
                 Ok(())
             }
@@ -3142,7 +3295,7 @@ impl fmt::Display for ForXml {
             ForXml::Path(root) => {
                 write!(f, "PATH")?;
                 if let Some(root) = root {
-                    write!(f, "('{}')", root)?;
+                    write!(f, "('{root}')")?;
                 }
                 Ok(())
             }
@@ -3205,7 +3358,7 @@ impl fmt::Display for JsonTableColumn {
             JsonTableColumn::Named(json_table_named_column) => {
                 write!(f, "{json_table_named_column}")
             }
-            JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident),
+            JsonTableColumn::ForOrdinality(ident) => write!(f, "{ident} FOR ORDINALITY"),
             JsonTableColumn::Nested(json_table_nested_column) => {
                 write!(f, "{json_table_nested_column}")
             }
@@ -3271,10 +3424,10 @@ impl fmt::Display for JsonTableNamedColumn {
             self.path
         )?;
         if let Some(on_empty) = &self.on_empty {
-            write!(f, " {} ON EMPTY", on_empty)?;
+            write!(f, " {on_empty} ON EMPTY")?;
         }
         if let Some(on_error) = &self.on_error {
-            write!(f, " {} ON ERROR", on_error)?;
+            write!(f, " {on_error} ON ERROR")?;
         }
         Ok(())
     }
@@ -3296,7 +3449,7 @@ impl fmt::Display for JsonTableColumnErrorHandling {
         match self {
             JsonTableColumnErrorHandling::Null => write!(f, "NULL"),
             JsonTableColumnErrorHandling::Default(json_string) => {
-                write!(f, "DEFAULT {}", json_string)
+                write!(f, "DEFAULT {json_string}")
             }
             JsonTableColumnErrorHandling::Error => write!(f, "ERROR"),
         }
@@ -3429,12 +3582,12 @@ impl fmt::Display for XmlTableColumn {
                 default,
                 nullable,
             } => {
-                write!(f, " {}", r#type)?;
+                write!(f, " {type}")?;
                 if let Some(p) = path {
-                    write!(f, " PATH {}", p)?;
+                    write!(f, " PATH {p}")?;
                 }
                 if let Some(d) = default {
-                    write!(f, " DEFAULT {}", d)?;
+                    write!(f, " DEFAULT {d}")?;
                 }
                 if !*nullable {
                     write!(f, " NOT NULL")?;
@@ -3465,7 +3618,7 @@ impl fmt::Display for XmlPassingArgument {
         }
         write!(f, "{}", self.expr)?;
         if let Some(alias) = &self.alias {
-            write!(f, " AS {}", alias)?;
+            write!(f, " AS {alias}")?;
         }
         Ok(())
     }
@ -15,7 +15,7 @@
|
||||||
// specific language governing permissions and limitations
|
// specific language governing permissions and limitations
|
||||||
// under the License.
|
// under the License.
|
||||||
|
|
||||||
use crate::ast::query::SelectItemQualifiedWildcardKind;
|
use crate::ast::{query::SelectItemQualifiedWildcardKind, ColumnOptions};
|
||||||
use core::iter;
|
use core::iter;
|
||||||
|
|
||||||
use crate::tokenizer::Span;
|
use crate::tokenizer::Span;
|
||||||
|
@ -28,16 +28,17 @@ use super::{
|
||||||
ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
|
ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte,
|
||||||
Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
|
Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable,
|
||||||
Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
|
Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList,
|
||||||
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate,
|
FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, IndexColumn, Insert,
|
||||||
InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView,
|
Interpolate, InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem,
|
||||||
LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, NamedWindowDefinition,
|
LateralView, LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList,
|
||||||
ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OpenStatement,
|
NamedWindowDefinition, ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction,
|
||||||
OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, Query,
|
OnInsert, OpenStatement, OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource,
|
||||||
RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, ReplaceSelectElement,
|
ProjectionSelect, Query, RaiseStatement, RaiseStatementValue, ReferentialAction,
|
||||||
ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript,
|
RenameSelectItem, ReplaceSelectElement, ReplaceSelectItem, Select, SelectInto, SelectItem,
|
||||||
SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject,
|
SetExpr, SqlOption, Statement, Subscript, SymbolDefinition, TableAlias, TableAliasColumnDef,
|
||||||
TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef,
|
TableConstraint, TableFactor, TableObject, TableOptionsClustered, TableWithJoins,
|
||||||
WhileStatement, WildcardAdditionalOptions, With, WithFill,
|
UpdateTableFromKind, Use, Value, Values, ViewColumnDef, WhileStatement,
|
||||||
|
WildcardAdditionalOptions, With, WithFill,
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Given an iterator of spans, return the [Span::union] of all spans.
|
/// Given an iterator of spans, return the [Span::union] of all spans.
|
||||||
|
@ -422,6 +423,7 @@ impl Spanned for Statement {
|
||||||
Statement::CreateIndex(create_index) => create_index.span(),
|
Statement::CreateIndex(create_index) => create_index.span(),
|
||||||
Statement::CreateRole { .. } => Span::empty(),
|
Statement::CreateRole { .. } => Span::empty(),
|
||||||
Statement::CreateSecret { .. } => Span::empty(),
|
Statement::CreateSecret { .. } => Span::empty(),
|
||||||
|
Statement::CreateServer { .. } => Span::empty(),
|
||||||
Statement::CreateConnector { .. } => Span::empty(),
|
Statement::CreateConnector { .. } => Span::empty(),
|
||||||
Statement::AlterTable {
|
Statement::AlterTable {
|
||||||
name,
|
name,
|
||||||
|
@ -650,7 +652,7 @@ impl Spanned for TableConstraint {
|
||||||
name.iter()
|
name.iter()
|
||||||
.map(|i| i.span)
|
.map(|i| i.span)
|
||||||
.chain(index_name.iter().map(|i| i.span))
|
.chain(index_name.iter().map(|i| i.span))
|
||||||
.chain(columns.iter().map(|i| i.span))
|
.chain(columns.iter().map(|i| i.span()))
|
||||||
.chain(characteristics.iter().map(|i| i.span())),
|
.chain(characteristics.iter().map(|i| i.span())),
|
||||||
),
|
),
|
||||||
TableConstraint::PrimaryKey {
|
TableConstraint::PrimaryKey {
|
||||||
|
@ -664,7 +666,7 @@ impl Spanned for TableConstraint {
|
||||||
name.iter()
|
name.iter()
|
||||||
.map(|i| i.span)
|
.map(|i| i.span)
|
||||||
.chain(index_name.iter().map(|i| i.span))
|
.chain(index_name.iter().map(|i| i.span))
|
||||||
.chain(columns.iter().map(|i| i.span))
|
.chain(columns.iter().map(|i| i.span()))
|
||||||
.chain(characteristics.iter().map(|i| i.span())),
|
.chain(characteristics.iter().map(|i| i.span())),
|
||||||
),
|
),
|
||||||
TableConstraint::ForeignKey {
|
TableConstraint::ForeignKey {
|
||||||
|
@ -700,7 +702,7 @@ impl Spanned for TableConstraint {
|
||||||
} => union_spans(
|
} => union_spans(
|
||||||
name.iter()
|
name.iter()
|
||||||
.map(|i| i.span)
|
.map(|i| i.span)
|
||||||
.chain(columns.iter().map(|i| i.span)),
|
.chain(columns.iter().map(|i| i.span())),
|
||||||
),
|
),
|
||||||
TableConstraint::FulltextOrSpatial {
|
TableConstraint::FulltextOrSpatial {
|
||||||
fulltext: _,
|
fulltext: _,
|
||||||
|
@ -711,7 +713,7 @@ impl Spanned for TableConstraint {
|
||||||
opt_index_name
|
opt_index_name
|
||||||
.iter()
|
.iter()
|
||||||
.map(|i| i.span)
|
.map(|i| i.span)
|
||||||
.chain(columns.iter().map(|i| i.span)),
|
.chain(columns.iter().map(|i| i.span())),
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -745,6 +747,12 @@ impl Spanned for CreateIndex {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Spanned for IndexColumn {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.column.span()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Spanned for CaseStatement {
|
impl Spanned for CaseStatement {
|
||||||
fn span(&self) -> Span {
|
fn span(&self) -> Span {
|
||||||
let CaseStatement {
|
let CaseStatement {
|
||||||
|
@ -917,6 +925,7 @@ impl Spanned for AlterColumnOperation {
|
||||||
AlterColumnOperation::SetDataType {
|
AlterColumnOperation::SetDataType {
|
||||||
data_type: _,
|
data_type: _,
|
||||||
using,
|
using,
|
||||||
|
had_set: _,
|
||||||
} => using.as_ref().map_or(Span::empty(), |u| u.span()),
|
} => using.as_ref().map_or(Span::empty(), |u| u.span()),
|
||||||
AlterColumnOperation::AddGenerated { .. } => Span::empty(),
|
AlterColumnOperation::AddGenerated { .. } => Span::empty(),
|
||||||
}
|
}
|
||||||
|
@ -984,10 +993,13 @@ impl Spanned for ViewColumnDef {
|
||||||
options,
|
options,
|
||||||
} = self;
|
} = self;
|
||||||
|
|
||||||
union_spans(
|
name.span.union_opt(&options.as_ref().map(|o| o.span()))
|
||||||
core::iter::once(name.span)
|
}
|
||||||
.chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))),
|
}
|
||||||
)
|
|
||||||
|
impl Spanned for ColumnOptions {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
union_spans(self.as_slice().iter().map(|i| i.span()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1048,7 +1060,9 @@ impl Spanned for CreateTableOptions {
|
||||||
match self {
|
match self {
|
||||||
CreateTableOptions::None => Span::empty(),
|
CreateTableOptions::None => Span::empty(),
|
||||||
CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
|
CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||||
CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())),
|
CreateTableOptions::Options(vec) => {
|
||||||
|
union_spans(vec.as_slice().iter().map(|i| i.span()))
|
||||||
|
}
|
||||||
CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
|
CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||||
CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
|
CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())),
|
||||||
}
|
}
|
||||||
|
@ -1062,7 +1076,10 @@ impl Spanned for CreateTableOptions {
|
||||||
impl Spanned for AlterTableOperation {
|
impl Spanned for AlterTableOperation {
|
||||||
fn span(&self) -> Span {
|
fn span(&self) -> Span {
|
||||||
match self {
|
match self {
|
||||||
AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(),
|
AlterTableOperation::AddConstraint {
|
||||||
|
constraint,
|
||||||
|
not_valid: _,
|
||||||
|
} => constraint.span(),
|
||||||
AlterTableOperation::AddColumn {
|
AlterTableOperation::AddColumn {
|
||||||
column_keyword: _,
|
column_keyword: _,
|
||||||
if_not_exists: _,
|
if_not_exists: _,
|
||||||
|
@ -1095,10 +1112,10 @@ impl Spanned for AlterTableOperation {
|
||||||
} => name.span,
|
} => name.span,
|
||||||
AlterTableOperation::DropColumn {
|
AlterTableOperation::DropColumn {
|
||||||
has_column_keyword: _,
|
has_column_keyword: _,
|
||||||
column_name,
|
column_names,
|
||||||
if_exists: _,
|
if_exists: _,
|
||||||
drop_behavior: _,
|
drop_behavior: _,
|
||||||
} => column_name.span,
|
} => union_spans(column_names.iter().map(|i| i.span)),
|
||||||
AlterTableOperation::AttachPartition { partition } => partition.span(),
|
AlterTableOperation::AttachPartition { partition } => partition.span(),
|
||||||
AlterTableOperation::DetachPartition { partition } => partition.span(),
|
AlterTableOperation::DetachPartition { partition } => partition.span(),
|
||||||
AlterTableOperation::FreezePartition {
|
AlterTableOperation::FreezePartition {
|
||||||
|
@ -1183,6 +1200,7 @@ impl Spanned for AlterTableOperation {
|
||||||
AlterTableOperation::AutoIncrement { value, .. } => value.span(),
|
AlterTableOperation::AutoIncrement { value, .. } => value.span(),
|
||||||
AlterTableOperation::Lock { .. } => Span::empty(),
|
AlterTableOperation::Lock { .. } => Span::empty(),
|
||||||
AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
|
AlterTableOperation::ReplicaIdentity { .. } => Span::empty(),
|
||||||
|
AlterTableOperation::ValidateConstraint { name } => name.span,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1398,7 +1416,6 @@ impl Spanned for AssignmentTarget {
|
||||||
/// f.e. `IS NULL <expr>` reports as `<expr>::span`.
|
/// f.e. `IS NULL <expr>` reports as `<expr>::span`.
|
||||||
///
|
///
|
||||||
/// Missing spans:
|
/// Missing spans:
|
||||||
/// - [Expr::TypedString] # missing span for data_type
|
|
||||||
/// - [Expr::MatchAgainst] # MySQL specific
|
/// - [Expr::MatchAgainst] # MySQL specific
|
||||||
/// - [Expr::RLike] # MySQL specific
|
/// - [Expr::RLike] # MySQL specific
|
||||||
/// - [Expr::Struct] # BigQuery specific
|
/// - [Expr::Struct] # BigQuery specific
|
||||||
|
@ -1607,6 +1624,7 @@ impl Spanned for Expr {
|
||||||
Expr::OuterJoin(expr) => expr.span(),
|
Expr::OuterJoin(expr) => expr.span(),
|
||||||
Expr::Prior(expr) => expr.span(),
|
Expr::Prior(expr) => expr.span(),
|
||||||
Expr::Lambda(_) => Span::empty(),
|
Expr::Lambda(_) => Span::empty(),
|
||||||
|
Expr::MemberOf(member_of) => member_of.value.span().union(&member_of.array.span()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -116,7 +116,6 @@ impl From<ValueWithSpan> for Value {
     derive(Visit, VisitMut),
     visit(with = "visit_value")
 )]
-
 pub enum Value {
     /// Numeric literal
     #[cfg(not(feature = "bigdecimal"))]
@@ -551,16 +550,16 @@ impl fmt::Display for EscapeUnicodeStringLiteral<'_> {
                     write!(f, r#"\\"#)?;
                 }
                 x if x.is_ascii() => {
-                    write!(f, "{}", c)?;
+                    write!(f, "{c}")?;
                 }
                 _ => {
                     let codepoint = c as u32;
                     // if the character fits in 32 bits, we can use the \XXXX format
                     // otherwise, we need to use the \+XXXXXX format
                     if codepoint <= 0xFFFF {
-                        write!(f, "\\{:04X}", codepoint)?;
+                        write!(f, "\\{codepoint:04X}")?;
                     } else {
-                        write!(f, "\\+{:06X}", codepoint)?;
+                        write!(f, "\\+{codepoint:06X}")?;
                     }
                 }
             }

@@ -926,10 +926,10 @@ mod tests {
     #[test]
     fn overflow() {
         let cond = (0..1000)
-            .map(|n| format!("X = {}", n))
+            .map(|n| format!("X = {n}"))
             .collect::<Vec<_>>()
             .join(" OR ");
-        let sql = format!("SELECT x where {0}", cond);
+        let sql = format!("SELECT x where {cond}");

         let dialect = GenericDialect {};
         let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap();

@@ -46,7 +46,11 @@ pub struct BigQueryDialect;

 impl Dialect for BigQueryDialect {
     fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
-        self.maybe_parse_statement(parser)
+        if parser.parse_keyword(Keyword::BEGIN) {
+            return Some(parser.parse_begin_exception_end());
+        }
+
+        None
     }

     /// See <https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers>
@@ -141,48 +145,3 @@ impl Dialect for BigQueryDialect {
         true
     }
 }
-
-impl BigQueryDialect {
-    fn maybe_parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
-        if parser.peek_keyword(Keyword::BEGIN) {
-            return Some(self.parse_begin(parser));
-        }
-        None
-    }
-
-    /// Parse a `BEGIN` statement.
-    /// <https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend>
-    fn parse_begin(&self, parser: &mut Parser) -> Result<Statement, ParserError> {
-        parser.expect_keyword(Keyword::BEGIN)?;
-
-        let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
-
-        let has_exception_when_clause = parser.parse_keywords(&[
-            Keyword::EXCEPTION,
-            Keyword::WHEN,
-            Keyword::ERROR,
-            Keyword::THEN,
-        ]);
-        let exception_statements = if has_exception_when_clause {
-            if !parser.peek_keyword(Keyword::END) {
-                Some(parser.parse_statement_list(&[Keyword::END])?)
-            } else {
-                Some(Default::default())
-            }
-        } else {
-            None
-        };
-
-        parser.expect_keyword(Keyword::END)?;
-
-        Ok(Statement::StartTransaction {
-            begin: true,
-            statements,
-            exception_statements,
-            has_end_keyword: true,
-            transaction: None,
-            modifier: None,
-            modes: Default::default(),
-        })
-    }
-}

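A minimal usage sketch of the refactor above, assuming the shared `parse_begin_exception_end` path this diff switches BigQuery to: a `BEGIN ... EXCEPTION WHEN ERROR THEN ... END` block should still come back as a single statement. The SQL string mirrors the BigQuery test updated later in this diff.

use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // Same shape as the BEGIN block exercised by the BigQuery tests below.
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(statements.len(), 1);
    println!("{statements:?}");
}
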
@@ -52,6 +52,10 @@ impl Dialect for GenericDialect {
         true
     }

+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        true
+    }
+
     fn supports_connect_by(&self) -> bool {
         true
     }
@@ -108,6 +112,14 @@ impl Dialect for GenericDialect {
         true
     }

+    fn supports_from_first_select(&self) -> bool {
+        true
+    }
+
+    fn supports_projection_trailing_commas(&self) -> bool {
+        true
+    }
+
     fn supports_asc_desc_in_column_definition(&self) -> bool {
         true
     }

@@ -278,6 +278,34 @@ pub trait Dialect: Debug + Any {
         false
     }

+    /// Indicates whether the dialect supports left-associative join parsing
+    /// by default when parentheses are omitted in nested joins.
+    ///
+    /// Most dialects (like MySQL or Postgres) assume **left-associative** precedence,
+    /// so a query like:
+    ///
+    /// ```sql
+    /// SELECT * FROM t1 NATURAL JOIN t5 INNER JOIN t0 ON ...
+    /// ```
+    /// is interpreted as:
+    /// ```sql
+    /// ((t1 NATURAL JOIN t5) INNER JOIN t0 ON ...)
+    /// ```
+    /// and internally represented as a **flat list** of joins.
+    ///
+    /// In contrast, some dialects (e.g. **Snowflake**) assume **right-associative**
+    /// precedence and interpret the same query as:
+    /// ```sql
+    /// (t1 NATURAL JOIN (t5 INNER JOIN t0 ON ...))
+    /// ```
+    /// which results in a **nested join** structure in the AST.
+    ///
+    /// If this method returns `false`, the parser must build nested join trees
+    /// even in the absence of parentheses to reflect the correct associativity
+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        true
+    }
+
     /// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN.
     fn supports_outer_join_operator(&self) -> bool {
         false
@@ -587,7 +615,7 @@ pub trait Dialect: Debug + Any {
        }

        let token = parser.peek_token();
-        debug!("get_next_precedence_full() {:?}", token);
+        debug!("get_next_precedence_full() {token:?}");
        match token.token {
            Token::Word(w) if w.keyword == Keyword::OR => Ok(p!(Or)),
            Token::Word(w) if w.keyword == Keyword::AND => Ok(p!(And)),
@@ -621,6 +649,7 @@ pub trait Dialect: Debug + Any {
                Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
                Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
                Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
+                Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
                _ => Ok(self.prec_unknown()),
            },
            Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)),
@@ -633,6 +662,7 @@ pub trait Dialect: Debug + Any {
            Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)),
            Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)),
            Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)),
+            Token::Word(w) if w.keyword == Keyword::MEMBER => Ok(p!(Like)),
            Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)),
            Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)),
            Token::Period => Ok(p!(Period)),
@@ -1028,6 +1058,24 @@ pub trait Dialect: Debug + Any {
     fn supports_set_names(&self) -> bool {
         false
     }
+
+    fn supports_space_separated_column_options(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports the `USING` clause in an `ALTER COLUMN` statement.
+    /// Example:
+    /// ```sql
+    /// ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE <type> USING <exp>`
+    /// ```
+    fn supports_alter_column_type_using(&self) -> bool {
+        false
+    }
+
+    /// Returns true if the dialect supports `ALTER TABLE tbl DROP COLUMN c1, ..., cn`
+    fn supports_comma_separated_drop_column_list(&self) -> bool {
+        false
+    }
 }

 /// This represents the operators for which precedence must be defined

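A hedged sketch of how a downstream dialect might opt into the capability methods added to the trait above. `MyDialect` is a made-up type; the two identifier methods are the usual required members of the `Dialect` trait, and the overrides rely on the defaults introduced in this diff.

use sqlparser::dialect::Dialect;

#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }

    // Parse unparenthesized joins as nested (right-associative) trees,
    // like the Snowflake override further down in this diff.
    fn supports_left_associative_joins_without_parens(&self) -> bool {
        false
    }

    // Accept `ALTER TABLE t DROP COLUMN a, b, c`.
    fn supports_comma_separated_drop_column_list(&self) -> bool {
        true
    }
}
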
@@ -104,7 +104,7 @@ impl Dialect for PostgreSqlDialect {

     fn get_next_precedence(&self, parser: &Parser) -> Option<Result<u8, ParserError>> {
         let token = parser.peek_token();
-        debug!("get_next_precedence() {:?}", token);
+        debug!("get_next_precedence() {token:?}");

         // we only return some custom value here when the behaviour (not merely the numeric value) differs
         // from the default implementation
@@ -258,4 +258,8 @@ impl Dialect for PostgreSqlDialect {
     fn supports_set_names(&self) -> bool {
         true
     }
+
+    fn supports_alter_column_type_using(&self) -> bool {
+        true
+    }
 }

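A small sketch, assuming the new `supports_alter_column_type_using` override above: the Postgres dialect should now accept the `USING` clause documented in the trait. Table and column names are illustrative.

use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER TABLE tbl ALTER COLUMN col SET DATA TYPE BIGINT USING col::BIGINT";
    // Prints the parsed statement, or the parser error if the clause is rejected.
    println!("{:?}", Parser::parse_sql(&PostgreSqlDialect {}, sql));
}
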
@@ -80,13 +80,15 @@ impl Dialect for RedshiftSqlDialect {
     }

     fn is_identifier_start(&self, ch: char) -> bool {
-        // Extends Postgres dialect with sharp
-        PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#'
+        // Extends Postgres dialect with sharp and UTF-8 multibyte chars
+        // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
+        PostgreSqlDialect {}.is_identifier_start(ch) || ch == '#' || !ch.is_ascii()
     }

     fn is_identifier_part(&self, ch: char) -> bool {
-        // Extends Postgres dialect with sharp
-        PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#'
+        // Extends Postgres dialect with sharp and UTF-8 multibyte chars
+        // https://docs.aws.amazon.com/redshift/latest/dg/r_names.html
+        PostgreSqlDialect {}.is_identifier_part(ch) || ch == '#' || !ch.is_ascii()
     }

     /// redshift has `CONVERT(type, value)` instead of `CONVERT(value, type)`

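A quick check of the relaxed Redshift identifier rules above; the non-ASCII characters are arbitrary examples of multibyte input, not values taken from the change itself.

use sqlparser::dialect::{Dialect, RedshiftSqlDialect};

fn main() {
    let dialect = RedshiftSqlDialect {};
    assert!(dialect.is_identifier_start('#'));
    assert!(dialect.is_identifier_start('日')); // non-ASCII start is now accepted
    assert!(dialect.is_identifier_part('ß')); // non-ASCII continuation too
}
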
@@ -131,6 +131,10 @@ impl Dialect for SnowflakeDialect {
     }

     fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
+        if parser.parse_keyword(Keyword::BEGIN) {
+            return Some(parser.parse_begin_exception_end());
+        }
+
         if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
             // ALTER SESSION
             let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
@@ -279,6 +283,10 @@ impl Dialect for SnowflakeDialect {
         true
     }

+    fn supports_left_associative_joins_without_parens(&self) -> bool {
+        false
+    }
+
     fn is_reserved_for_identifier(&self, kw: Keyword) -> bool {
         // Unreserve some keywords that Snowflake accepts as identifiers
         // See: https://docs.snowflake.com/en/sql-reference/reserved-keywords
@@ -293,9 +301,8 @@ impl Dialect for SnowflakeDialect {
         true
     }

-    fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
-        explicit
-            || match kw {
+    fn is_column_alias(&self, kw: &Keyword, parser: &mut Parser) -> bool {
+        match kw {
             // The following keywords can be considered an alias as long as
             // they are not followed by other tokens that may change their meaning
             // e.g. `SELECT * EXCEPT (col1) FROM tbl`
@@ -352,6 +359,14 @@ impl Dialect for SnowflakeDialect {
     fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] {
         &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR
     }
+
+    fn supports_space_separated_column_options(&self) -> bool {
+        true
+    }
+
+    fn supports_comma_separated_drop_column_list(&self) -> bool {
+        true
+    }
 }

 fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statement, ParserError> {
@@ -1177,7 +1192,7 @@ fn parse_column_policy_property(
     parser: &mut Parser,
     with: bool,
 ) -> Result<ColumnPolicyProperty, ParserError> {
-    let policy_name = parser.parse_identifier()?;
+    let policy_name = parser.parse_object_name(false)?;
     let using_columns = if parser.parse_keyword(Keyword::USING) {
         parser.expect_token(&Token::LParen)?;
         let columns = parser.parse_comma_separated(|p| p.parse_identifier())?;

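A usage sketch for the comma-separated DROP COLUMN list that the Snowflake dialect now reports support for; the table and column names are made up, and the result is printed rather than asserted.

use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "ALTER TABLE t DROP COLUMN a, b, c";
    // One ALTER TABLE statement whose DropColumn operation carries all three names.
    println!("{:?}", Parser::parse_sql(&SnowflakeDialect {}, sql));
}
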
@@ -395,6 +395,7 @@ define_keywords!(
     FUNCTION,
     FUNCTIONS,
     FUSION,
+    FUTURE,
     GENERAL,
     GENERATE,
     GENERATED,
@@ -646,6 +647,7 @@ define_keywords!(
     ORDER,
     ORDINALITY,
     ORGANIZATION,
+    OTHER,
     OUT,
     OUTER,
     OUTPUT,
@@ -814,6 +816,7 @@ define_keywords!(
     SERDE,
     SERDEPROPERTIES,
     SERIALIZABLE,
+    SERVER,
     SERVICE,
     SESSION,
     SESSION_USER,
@@ -934,6 +937,8 @@ define_keywords!(
     TRY,
     TRY_CAST,
     TRY_CONVERT,
+    TSQUERY,
+    TSVECTOR,
     TUPLE,
     TYPE,
     UBIGINT,
@@ -977,6 +982,7 @@ define_keywords!(
     UUID,
     VACUUM,
     VALID,
+    VALIDATE,
     VALIDATION_MODE,
     VALUE,
     VALUES,
@@ -1012,6 +1018,7 @@ define_keywords!(
     WITHOUT,
     WITHOUT_ARRAY_WRAPPER,
     WORK,
+    WRAPPER,
     WRITE,
     XML,
     XMLNAMESPACES,

File diff suppressed because it is too large

@@ -270,7 +270,7 @@ impl TestedDialects {
                 tokenizer = tokenizer.with_unescape(options.unescape);
             }
             let tokens = tokenizer.tokenize().unwrap();
-            assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect);
+            assert_eq!(expected, tokens, "Tokenized differently for {dialect:?}");
         });
     }
 }
@@ -366,6 +366,11 @@ pub fn number(n: &str) -> Value {
     Value::Number(n.parse().unwrap(), false)
 }

+/// Creates a [Value::SingleQuotedString]
+pub fn single_quoted_string(s: impl Into<String>) -> Value {
+    Value::SingleQuotedString(s.into())
+}
+
 pub fn table_alias(name: impl Into<String>) -> Option<TableAlias> {
     Some(TableAlias {
         name: Ident::new(name),
@@ -448,3 +453,52 @@ pub fn call(function: &str, args: impl IntoIterator<Item = Expr>) -> Expr {
         within_group: vec![],
     })
 }
+
+/// Gets the first index column (mysql calls it a key part) of the first index found in a
+/// [`Statement::CreateIndex`], [`Statement::CreateTable`], or [`Statement::AlterTable`].
+pub fn index_column(stmt: Statement) -> Expr {
+    match stmt {
+        Statement::CreateIndex(CreateIndex { columns, .. }) => {
+            columns.first().unwrap().column.expr.clone()
+        }
+        Statement::CreateTable(CreateTable { constraints, .. }) => {
+            match constraints.first().unwrap() {
+                TableConstraint::Index { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::Unique { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::PrimaryKey { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                TableConstraint::FulltextOrSpatial { columns, .. } => {
+                    columns.first().unwrap().column.expr.clone()
+                }
+                _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
+            }
+        }
+        Statement::AlterTable { operations, .. } => match operations.first().unwrap() {
+            AlterTableOperation::AddConstraint { constraint, .. } => {
+                match constraint {
+                    TableConstraint::Index { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::Unique { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::PrimaryKey { columns, .. } => {
+                        columns.first().unwrap().column.expr.clone()
+                    }
+                    TableConstraint::FulltextOrSpatial {
+                        columns,
+                        ..
+                    } => columns.first().unwrap().column.expr.clone(),
+                    _ => panic!("Expected an index, unique, primary, full text, or spatial constraint (foreign key does not support general key part expressions)"),
+                }
+            }
+            _ => panic!("Expected a constraint"),
+        },
+        _ => panic!("Expected CREATE INDEX, ALTER TABLE, or CREATE TABLE, got: {stmt:?}"),
+    }
+}

@@ -1751,7 +1751,7 @@ impl<'a> Tokenizer<'a> {
             (None, Some(tok)) => Ok(Some(tok)),
             (None, None) => self.tokenizer_error(
                 chars.location(),
-                format!("Expected a valid binary operator after '{}'", prefix),
+                format!("Expected a valid binary operator after '{prefix}'"),
             ),
         }
     }
@@ -1809,7 +1809,7 @@ impl<'a> Tokenizer<'a> {
                 chars.next();

                 let mut temp = String::new();
-                let end_delimiter = format!("${}$", value);
+                let end_delimiter = format!("${value}$");

                 loop {
                     match chars.next() {
@@ -2402,13 +2402,13 @@ fn take_char_from_hex_digits(
             location: chars.location(),
         })?;
         let digit = next_char.to_digit(16).ok_or_else(|| TokenizerError {
-            message: format!("Invalid hex digit in escaped unicode string: {}", next_char),
+            message: format!("Invalid hex digit in escaped unicode string: {next_char}"),
             location: chars.location(),
         })?;
         result = result * 16 + digit;
     }
     char::from_u32(result).ok_or_else(|| TokenizerError {
-        message: format!("Invalid unicode character: {:x}", result),
+        message: format!("Invalid unicode character: {result:x}"),
         location: chars.location(),
     })
 }
@@ -3504,7 +3504,7 @@ mod tests {
     }

     fn check_unescape(s: &str, expected: Option<&str>) {
-        let s = format!("'{}'", s);
+        let s = format!("'{s}'");
         let mut state = State {
             peekable: s.chars().peekable(),
             line: 0,

@@ -261,10 +261,10 @@ fn parse_at_at_identifier() {

 #[test]
 fn parse_begin() {
-    let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END"#;
+    let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; RAISE USING MESSAGE = FORMAT('ERR: %s', 'Bad'); END"#;
     let Statement::StartTransaction {
         statements,
-        exception_statements,
+        exception,
         has_end_keyword,
         ..
     } = bigquery().verified_stmt(sql)
@@ -272,7 +272,10 @@ fn parse_begin() {
         unreachable!();
     };
     assert_eq!(1, statements.len());
-    assert_eq!(1, exception_statements.unwrap().len());
+    assert!(exception.is_some());
+
+    let exception = exception.unwrap();
+    assert_eq!(1, exception.len());
     assert!(has_end_keyword);

     bigquery().verified_stmt(
@@ -352,14 +355,16 @@ fn parse_create_view_with_options() {
             ViewColumnDef {
                 name: Ident::new("age"),
                 data_type: None,
-                options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue {
-                    key: Ident::new("description"),
-                    value: Expr::Value(
-                        Value::DoubleQuotedString("field age".to_string()).with_span(
-                            Span::new(Location::new(1, 42), Location::new(1, 52))
-                        )
-                    ),
-                }])]),
+                options: Some(ColumnOptions::CommaSeparated(vec![ColumnOption::Options(
+                    vec![SqlOption::KeyValue {
+                        key: Ident::new("description"),
+                        value: Expr::Value(
+                            Value::DoubleQuotedString("field age".to_string()).with_span(
+                                Span::new(Location::new(1, 42), Location::new(1, 52))
+                            )
+                        ),
+                    }]
+                )])),
             },
         ],
         columns
@@ -601,11 +606,13 @@ fn parse_nested_data_types() {
                         field_name: Some("a".into()),
                         field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(
                             Box::new(DataType::Int64,)
-                        ))
+                        )),
+                        options: None,
                     },
                     StructField {
                         field_name: Some("b".into()),
-                        field_type: DataType::Bytes(Some(42))
+                        field_type: DataType::Bytes(Some(42)),
+                        options: None,
                     },
                 ],
                 StructBracketKind::AngleBrackets
@@ -619,6 +626,7 @@ fn parse_nested_data_types() {
                 vec![StructField {
                     field_name: None,
                     field_type: DataType::Int64,
+                    options: None,
                 }],
                 StructBracketKind::AngleBrackets
             ),
@@ -632,35 +640,6 @@ fn parse_nested_data_types() {
     }
 }

-#[test]
-fn parse_invalid_brackets() {
-    let sql = "SELECT STRUCT<INT64>>(NULL)";
-    assert_eq!(
-        bigquery_and_generic()
-            .parse_sql_statements(sql)
-            .unwrap_err(),
-        ParserError::ParserError("unmatched > in STRUCT literal".to_string())
-    );
-
-    let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
-    assert_eq!(
-        bigquery_and_generic()
-            .parse_sql_statements(sql)
-            .unwrap_err(),
-        ParserError::ParserError("Expected: (, found: >".to_string())
-    );
-
-    let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
-    assert_eq!(
-        bigquery_and_generic()
-            .parse_sql_statements(sql)
-            .unwrap_err(),
-        ParserError::ParserError(
-            "Expected: ',' or ')' after column definition, found: >".to_string()
-        )
-    );
-}
-
 #[test]
 fn parse_tuple_struct_literal() {
     // tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax
|
@ -771,6 +750,7 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Int64,
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -799,7 +779,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
quote_style: None,
|
quote_style: None,
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}),
|
}),
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some(Ident {
|
field_name: Some(Ident {
|
||||||
|
@ -807,7 +788,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
quote_style: None,
|
quote_style: None,
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}),
|
}),
|
||||||
field_type: DataType::String(None)
|
field_type: DataType::String(None),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -825,17 +807,20 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
field_name: Some("arr".into()),
|
field_name: Some("arr".into()),
|
||||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||||
DataType::Float64
|
DataType::Float64
|
||||||
)))
|
))),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some("str".into()),
|
field_name: Some("str".into()),
|
||||||
field_type: DataType::Struct(
|
field_type: DataType::Struct(
|
||||||
vec![StructField {
|
vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Bool
|
field_type: DataType::Bool,
|
||||||
|
options: None,
|
||||||
}],
|
}],
|
||||||
StructBracketKind::AngleBrackets
|
StructBracketKind::AngleBrackets
|
||||||
)
|
),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -858,13 +843,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
field_type: DataType::Struct(
|
field_type: DataType::Struct(
|
||||||
Default::default(),
|
Default::default(),
|
||||||
StructBracketKind::AngleBrackets
|
StructBracketKind::AngleBrackets
|
||||||
)
|
),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some("y".into()),
|
field_name: Some("y".into()),
|
||||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||||
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
||||||
)))
|
))),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -879,7 +866,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Bool
|
field_type: DataType::Bool,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -891,7 +879,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Bytes(Some(42))
|
field_type: DataType::Bytes(Some(42)),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -907,7 +896,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Date
|
field_type: DataType::Date,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -916,11 +906,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Datetime(None),
|
data_type: DataType::Datetime(None),
|
||||||
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
},
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Datetime(None)
|
field_type: DataType::Datetime(None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -930,7 +924,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
values: vec![Expr::value(number("5.0"))],
|
values: vec![Expr::value(number("5.0"))],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Float64
|
field_type: DataType::Float64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[2])
|
expr_from_projection(&select.projection[2])
|
||||||
|
@ -940,7 +935,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
values: vec![Expr::value(number("1"))],
|
values: vec![Expr::value(number("1"))],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[3])
|
expr_from_projection(&select.projection[3])
|
||||||
|
@ -962,7 +958,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
})],
|
})],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Interval
|
field_type: DataType::Interval,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -971,13 +968,17 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::JSON,
|
data_type: DataType::JSON,
|
||||||
value: Value::SingleQuotedString(
|
value: ValueWithSpan {
|
||||||
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
|
value: Value::SingleQuotedString(
|
||||||
)
|
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
|
||||||
|
),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::JSON
|
field_type: DataType::JSON,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -993,7 +994,8 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::String(Some(42))
|
field_type: DataType::String(Some(42)),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1002,11 +1004,17 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Timestamp(None, TimezoneInfo::None),
|
data_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||||
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString(
|
||||||
|
"2008-12-25 15:30:00 America/Los_Angeles".into()
|
||||||
|
),
|
||||||
|
span: Span::empty(),
|
||||||
|
},
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Timestamp(None, TimezoneInfo::None)
|
field_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1016,11 +1024,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Time(None, TimezoneInfo::None),
|
data_type: DataType::Time(None, TimezoneInfo::None),
|
||||||
value: Value::SingleQuotedString("15:30:00".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("15:30:00".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Time(None, TimezoneInfo::None)
|
field_type: DataType::Time(None, TimezoneInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[2])
|
expr_from_projection(&select.projection[2])
|
||||||
|
@ -1033,11 +1045,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Numeric(ExactNumberInfo::None),
|
data_type: DataType::Numeric(ExactNumberInfo::None),
|
||||||
value: Value::SingleQuotedString("1".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Numeric(ExactNumberInfo::None)
|
field_type: DataType::Numeric(ExactNumberInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1046,11 +1062,15 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::BigNumeric(ExactNumberInfo::None),
|
data_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||||
value: Value::SingleQuotedString("1".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::BigNumeric(ExactNumberInfo::None)
|
field_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1067,10 +1087,12 @@ fn parse_typed_struct_syntax_bigquery() {
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some("key".into()),
|
field_name: Some("key".into()),
|
||||||
field_type: DataType::Int64,
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some("value".into()),
|
field_name: Some("value".into()),
|
||||||
field_type: DataType::Int64,
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1092,6 +1114,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Int64,
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1120,7 +1143,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
quote_style: None,
|
quote_style: None,
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}),
|
}),
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some(Ident {
|
field_name: Some(Ident {
|
||||||
|
@ -1128,7 +1152,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
quote_style: None,
|
quote_style: None,
|
||||||
span: Span::empty(),
|
span: Span::empty(),
|
||||||
}),
|
}),
|
||||||
field_type: DataType::String(None)
|
field_type: DataType::String(None),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1151,13 +1176,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
field_type: DataType::Struct(
|
field_type: DataType::Struct(
|
||||||
Default::default(),
|
Default::default(),
|
||||||
StructBracketKind::AngleBrackets
|
StructBracketKind::AngleBrackets
|
||||||
)
|
),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some("y".into()),
|
field_name: Some("y".into()),
|
||||||
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
|
||||||
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
DataType::Struct(Default::default(), StructBracketKind::AngleBrackets)
|
||||||
)))
|
))),
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1172,7 +1199,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
values: vec![Expr::Value(Value::Boolean(true).with_empty_span())],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Bool
|
field_type: DataType::Bool,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1184,7 +1212,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Bytes(Some(42))
|
field_type: DataType::Bytes(Some(42)),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1200,7 +1229,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Date
|
field_type: DataType::Date,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1209,11 +1239,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Datetime(None),
|
data_type: DataType::Datetime(None),
|
||||||
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Datetime(None)
|
field_type: DataType::Datetime(None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1223,7 +1257,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
values: vec![Expr::value(number("5.0"))],
|
values: vec![Expr::value(number("5.0"))],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Float64
|
field_type: DataType::Float64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[2])
|
expr_from_projection(&select.projection[2])
|
||||||
|
@ -1233,7 +1268,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
values: vec![Expr::value(number("1"))],
|
values: vec![Expr::value(number("1"))],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[3])
|
expr_from_projection(&select.projection[3])
|
||||||
|
@ -1255,7 +1291,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
})],
|
})],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Interval
|
field_type: DataType::Interval,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1264,13 +1301,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::JSON,
|
data_type: DataType::JSON,
|
||||||
value: Value::SingleQuotedString(
|
value: ValueWithSpan {
|
||||||
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
|
value: Value::SingleQuotedString(
|
||||||
)
|
r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into()
|
||||||
|
),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::JSON
|
field_type: DataType::JSON,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1286,7 +1327,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::String(Some(42))
|
field_type: DataType::String(Some(42)),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1295,11 +1337,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Timestamp(None, TimezoneInfo::None),
|
data_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||||
value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString(
|
||||||
|
"2008-12-25 15:30:00 America/Los_Angeles".into()
|
||||||
|
),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Timestamp(None, TimezoneInfo::None)
|
field_type: DataType::Timestamp(None, TimezoneInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1309,11 +1357,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Time(None, TimezoneInfo::None),
|
data_type: DataType::Time(None, TimezoneInfo::None),
|
||||||
value: Value::SingleQuotedString("15:30:00".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("15:30:00".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Time(None, TimezoneInfo::None)
|
field_type: DataType::Time(None, TimezoneInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[2])
|
expr_from_projection(&select.projection[2])
|
||||||
|
@ -1326,11 +1378,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::Numeric(ExactNumberInfo::None),
|
data_type: DataType::Numeric(ExactNumberInfo::None),
|
||||||
value: Value::SingleQuotedString("1".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::Numeric(ExactNumberInfo::None)
|
field_type: DataType::Numeric(ExactNumberInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1339,11 +1395,15 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
|
||||||
&Expr::Struct {
|
&Expr::Struct {
|
||||||
values: vec![Expr::TypedString {
|
values: vec![Expr::TypedString {
|
||||||
data_type: DataType::BigNumeric(ExactNumberInfo::None),
|
data_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||||
value: Value::SingleQuotedString("1".into())
|
value: ValueWithSpan {
|
||||||
|
value: Value::SingleQuotedString("1".into()),
|
||||||
|
span: Span::empty(),
|
||||||
|
}
|
||||||
}],
|
}],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: None,
|
field_name: None,
|
||||||
field_type: DataType::BigNumeric(ExactNumberInfo::None)
|
field_type: DataType::BigNumeric(ExactNumberInfo::None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1360,7 +1420,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
||||||
values: vec![Expr::value(number("5"))],
|
values: vec![Expr::value(number("5"))],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: Some(Ident::from("x")),
|
field_name: Some(Ident::from("x")),
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[0])
|
expr_from_projection(&select.projection[0])
|
||||||
|
@ -1372,7 +1433,8 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
||||||
)],
|
)],
|
||||||
fields: vec![StructField {
|
fields: vec![StructField {
|
||||||
field_name: Some(Ident::from("y")),
|
field_name: Some(Ident::from("y")),
|
||||||
field_type: DataType::String(None)
|
field_type: DataType::String(None),
|
||||||
|
options: None,
|
||||||
}]
|
}]
|
||||||
},
|
},
|
||||||
expr_from_projection(&select.projection[1])
|
expr_from_projection(&select.projection[1])
|
||||||
|
@ -1387,11 +1449,13 @@ fn parse_typed_struct_with_field_name_bigquery() {
|
||||||
fields: vec![
|
fields: vec![
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some(Ident::from("x")),
|
field_name: Some(Ident::from("x")),
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
},
|
},
|
||||||
StructField {
|
StructField {
|
||||||
field_name: Some(Ident::from("y")),
|
field_name: Some(Ident::from("y")),
|
||||||
field_type: DataType::Int64
|
field_type: DataType::Int64,
|
||||||
|
options: None,
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
@ -1409,7 +1473,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
|
||||||
values: vec![Expr::value(number("5"))],
|
values: vec![Expr::value(number("5"))],
|
     fields: vec![StructField {
         field_name: Some(Ident::from("x")),
-        field_type: DataType::Int64
+        field_type: DataType::Int64,
+        options: None,
     }]
 },
 expr_from_projection(&select.projection[0])
@@ -1421,7 +1486,8 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
     )],
     fields: vec![StructField {
         field_name: Some(Ident::from("y")),
-        field_type: DataType::String(None)
+        field_type: DataType::String(None),
+        options: None,
     }]
 },
 expr_from_projection(&select.projection[1])
@@ -1436,11 +1502,13 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() {
     fields: vec![
         StructField {
             field_name: Some(Ident::from("x")),
-            field_type: DataType::Int64
+            field_type: DataType::Int64,
+            options: None,
         },
         StructField {
             field_name: Some(Ident::from("y")),
-            field_type: DataType::Int64
+            field_type: DataType::Int64,
+            options: None,
         }
     ]
 },
@@ -2365,7 +2433,10 @@ fn test_triple_quote_typed_strings() {
 assert_eq!(
     Expr::TypedString {
         data_type: DataType::JSON,
-        value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into())
+        value: ValueWithSpan {
+            value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()),
+            span: Span::empty(),
+        }
     },
     expr
 );
@@ -2407,3 +2478,91 @@ fn test_any_type() {
 fn test_any_type_dont_break_custom_type() {
     bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)");
 }
+
+#[test]
+fn test_struct_field_options() {
+    bigquery().verified_stmt(concat!(
+        "CREATE TABLE my_table (",
+        "f0 STRUCT<a STRING, b INT64>, ",
+        "f1 STRUCT<",
+        "a STRING OPTIONS(description = 'This is a string', type = 'string'), ",
+        "b INT64",
+        "> OPTIONS(description = 'This is a struct field')",
+        ")",
+    ));
+}
+
+#[test]
+fn test_struct_trailing_and_nested_bracket() {
+    bigquery().verified_stmt(concat!(
+        "CREATE TABLE my_table (",
+        "f0 STRING, ",
+        "f1 STRUCT<a STRING, b STRUCT<c INT64, d STRING>>, ",
+        "f2 STRING",
+        ")",
+    ));
+
+    // More complex nested structs
+    bigquery().verified_stmt(concat!(
+        "CREATE TABLE my_table (",
+        "f0 STRING, ",
+        "f1 STRUCT<a STRING, b STRUCT<c INT64, d STRUCT<e STRING>>>, ",
+        "f2 STRUCT<h STRING, i STRUCT<j INT64, k STRUCT<l STRUCT<m STRING>>>>, ",
+        "f3 STRUCT<e STRING, f STRUCT<c INT64>>",
+        ")",
+    ));
+
+    // Bad case with missing closing bracket
+    assert_eq!(
+        ParserError::ParserError("Expected: >, found: )".to_owned()),
+        bigquery()
+            .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64)")
+            .unwrap_err()
+    );
+
+    // Bad case with redundant closing bracket
+    assert_eq!(
+        ParserError::ParserError(
+            "unmatched > after parsing data type STRUCT<a STRING, b INT64>)".to_owned()
+        ),
+        bigquery()
+            .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRING, b INT64>>)")
+            .unwrap_err()
+    );
+
+    // Base case with redundant closing bracket in nested struct
+    assert_eq!(
+        ParserError::ParserError(
+            "Expected: ',' or ')' after column definition, found: >".to_owned()
+        ),
+        bigquery()
+            .parse_sql_statements("CREATE TABLE my_table(f1 STRUCT<a STRUCT<b INT>>>, c INT64)")
+            .unwrap_err()
+    );
+
+    let sql = "SELECT STRUCT<INT64>>(NULL)";
+    assert_eq!(
+        bigquery_and_generic()
+            .parse_sql_statements(sql)
+            .unwrap_err(),
+        ParserError::ParserError("unmatched > in STRUCT literal".to_string())
+    );
+
+    let sql = "SELECT STRUCT<STRUCT<INT64>>>(NULL)";
+    assert_eq!(
+        bigquery_and_generic()
+            .parse_sql_statements(sql)
+            .unwrap_err(),
+        ParserError::ParserError("Expected: (, found: >".to_string())
+    );
+
+    let sql = "CREATE TABLE table (x STRUCT<STRUCT<INT64>>>)";
+    assert_eq!(
+        bigquery_and_generic()
+            .parse_sql_statements(sql)
+            .unwrap_err(),
+        ParserError::ParserError(
+            "Expected: ',' or ')' after column definition, found: >".to_string()
+        )
+    );
+}
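The new BigQuery tests above exercise field-level OPTIONS(...) inside STRUCT column definitions. A minimal sketch, assuming only the public Parser::parse_sql entry point rather than the test helpers used above, of parsing the same syntax directly:

use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    // Field-level OPTIONS on a STRUCT member, as covered by test_struct_field_options.
    let sql = "CREATE TABLE my_table (f1 STRUCT<a STRING OPTIONS(description = 'This is a string')>)";
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql).expect("BigQuery DDL should parse");
    println!("{statements:?}");
}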
@@ -28,7 +28,7 @@ use test_utils::*;
 use sqlparser::ast::Expr::{BinaryOp, Identifier};
 use sqlparser::ast::SelectItem::UnnamedExpr;
 use sqlparser::ast::TableFactor::Table;
-use sqlparser::ast::Value::Number;
+use sqlparser::ast::Value::Boolean;
 use sqlparser::ast::*;
 use sqlparser::dialect::ClickHouseDialect;
 use sqlparser::dialect::GenericDialect;
@@ -224,6 +224,10 @@ fn parse_create_table() {
     clickhouse().verified_stmt(
         r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#,
     );
+    clickhouse().one_statement_parses_to(
+        "CREATE TABLE x (a int) ENGINE = MergeTree() ORDER BY a",
+        "CREATE TABLE x (a INT) ENGINE = MergeTree ORDER BY a",
+    );
 }

 #[test]
@@ -669,11 +673,13 @@ fn parse_create_table_with_nested_data_types() {
     DataType::Tuple(vec![
         StructField {
             field_name: None,
-            field_type: DataType::FixedString(128)
+            field_type: DataType::FixedString(128),
+            options: None,
         },
         StructField {
             field_name: None,
-            field_type: DataType::Int128
+            field_type: DataType::Int128,
+            options: None,
         }
     ])
 ))),
@@ -685,12 +691,14 @@ fn parse_create_table_with_nested_data_types() {
     StructField {
         field_name: Some("a".into()),
         field_type: DataType::Datetime64(9, None),
+        options: None,
     },
     StructField {
         field_name: Some("b".into()),
         field_type: DataType::Array(ArrayElemTypeDef::Parenthesis(
             Box::new(DataType::Uuid)
-        ))
+        )),
+        options: None,
     },
 ]),
 options: vec![],
@@ -910,7 +918,7 @@ fn parse_create_view_with_fields_data_types() {
     }]),
     vec![]
 )),
-options: None
+options: None,
 },
 ViewColumnDef {
     name: "f".into(),
@@ -922,7 +930,7 @@ fn parse_create_view_with_fields_data_types() {
     }]),
     vec![]
 )),
-options: None
+options: None,
 },
 ]
 );
@@ -961,38 +969,103 @@ fn parse_limit_by() {

 #[test]
 fn parse_settings_in_query() {
-    match clickhouse_and_generic()
-        .verified_stmt(r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#)
-    {
-        Statement::Query(query) => {
-            assert_eq!(
-                query.settings,
-                Some(vec![
-                    Setting {
-                        key: Ident::new("max_threads"),
-                        value: Number("1".parse().unwrap(), false)
-                    },
-                    Setting {
-                        key: Ident::new("max_block_size"),
-                        value: Number("10000".parse().unwrap(), false)
-                    },
-                ])
-            );
-        }
-        _ => unreachable!(),
-    }
+    fn check_settings(sql: &str, expected: Vec<Setting>) {
+        match clickhouse_and_generic().verified_stmt(sql) {
+            Statement::Query(q) => {
+                assert_eq!(q.settings, Some(expected));
+            }
+            _ => unreachable!(),
+        }
+    }
+
+    for (sql, expected_settings) in [
+        (
+            r#"SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000"#,
+            vec![
+                Setting {
+                    key: Ident::new("max_threads"),
+                    value: Expr::value(number("1")),
+                },
+                Setting {
+                    key: Ident::new("max_block_size"),
+                    value: Expr::value(number("10000")),
+                },
+            ],
+        ),
+        (
+            r#"SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}"#,
+            vec![Setting {
+                key: Ident::new("additional_table_filters"),
+                value: Expr::Dictionary(vec![DictionaryField {
+                    key: Ident::with_quote('\'', "table_1"),
+                    value: Expr::value(single_quoted_string("x != 2")).into(),
+                }]),
+            }],
+        ),
+        (
+            r#"SELECT * FROM t SETTINGS additional_result_filter = 'x != 2', query_plan_optimize_lazy_materialization = false"#,
+            vec![
+                Setting {
+                    key: Ident::new("additional_result_filter"),
+                    value: Expr::value(single_quoted_string("x != 2")),
+                },
+                Setting {
+                    key: Ident::new("query_plan_optimize_lazy_materialization"),
+                    value: Expr::value(Boolean(false)),
+                },
+            ],
+        ),
+    ] {
+        check_settings(sql, expected_settings);
+    }

     let invalid_cases = vec![
-        "SELECT * FROM t SETTINGS a",
-        "SELECT * FROM t SETTINGS a=",
-        "SELECT * FROM t SETTINGS a=1, b",
-        "SELECT * FROM t SETTINGS a=1, b=",
-        "SELECT * FROM t SETTINGS a=1, b=c",
+        ("SELECT * FROM t SETTINGS a", "Expected: =, found: EOF"),
+        (
+            "SELECT * FROM t SETTINGS a=",
+            "Expected: an expression, found: EOF",
+        ),
+        ("SELECT * FROM t SETTINGS a=1, b", "Expected: =, found: EOF"),
+        (
+            "SELECT * FROM t SETTINGS a=1, b=",
+            "Expected: an expression, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {",
+            "Expected: identifier, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b'",
+            "Expected: :, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': ",
+            "Expected: an expression, found: EOF",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c',}",
+            "Expected: identifier, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c', 'd'}",
+            "Expected: :, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {'b': 'c', 'd': }",
+            "Expected: an expression, found: }",
+        ),
+        (
+            "SELECT * FROM t SETTINGS a = {ANY(b)}",
+            "Expected: :, found: (",
+        ),
     ];
-    for sql in invalid_cases {
-        clickhouse_and_generic()
-            .parse_sql_statements(sql)
-            .expect_err("Expected: SETTINGS key = value, found: ");
+    for (sql, error_msg) in invalid_cases {
+        assert_eq!(
+            clickhouse_and_generic()
+                .parse_sql_statements(sql)
+                .unwrap_err(),
+            ParserError(error_msg.to_string())
+        );
     }
 }
 #[test]
@@ -1341,7 +1414,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        clickhouse().verified_stmt(&format!("USE {}", object_name)),
+        clickhouse().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -1349,7 +1422,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+        clickhouse().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),
@@ -1363,7 +1436,7 @@
 fn test_query_with_format_clause() {
     let format_options = vec!["TabSeparated", "JSONCompact", "NULL"];
     for format in &format_options {
-        let sql = format!("SELECT * FROM t FORMAT {}", format);
+        let sql = format!("SELECT * FROM t FORMAT {format}");
         match clickhouse_and_generic().verified_stmt(&sql) {
             Statement::Query(query) => {
                 if *format == "NULL" {
@@ -1546,11 +1619,11 @@ fn parse_select_table_function_settings() {
     settings: Some(vec![
         Setting {
             key: "s0".into(),
-            value: Value::Number("3".parse().unwrap(), false),
+            value: Expr::value(number("3")),
         },
         Setting {
             key: "s1".into(),
-            value: Value::SingleQuotedString("s".into()),
+            value: Expr::value(single_quoted_string("s")),
         },
     ]),
 },
@@ -1571,11 +1644,11 @@ fn parse_select_table_function_settings() {
     settings: Some(vec![
         Setting {
             key: "s0".into(),
-            value: Value::Number("3".parse().unwrap(), false),
+            value: Expr::value(number("3")),
         },
         Setting {
             key: "s1".into(),
-            value: Value::SingleQuotedString("s".into()),
+            value: Expr::value(single_quoted_string("s")),
         },
     ]),
 },
@@ -1585,7 +1658,6 @@ fn parse_select_table_function_settings() {
     "SELECT * FROM t(SETTINGS a=)",
     "SELECT * FROM t(SETTINGS a=1, b)",
     "SELECT * FROM t(SETTINGS a=1, b=)",
-    "SELECT * FROM t(SETTINGS a=1, b=c)",
 ];
 for sql in invalid_cases {
     clickhouse_and_generic()
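In the rewritten parse_settings_in_query test above, a SETTINGS value is now an arbitrary expression rather than a bare literal. A minimal sketch, assuming only the public parser API and not the crate's test helpers:

use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

fn main() {
    // A dictionary literal as a SETTINGS value, which the old literal-only field could not represent.
    let sql = "SELECT * FROM t SETTINGS additional_table_filters = {'table_1': 'x != 2'}";
    let statements = Parser::parse_sql(&ClickHouseDialect {}, sql).expect("ClickHouse SETTINGS should parse");
    println!("{statements:?}");
}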
(Diff for one file suppressed because it is too large.)
@@ -19,6 +19,7 @@ use sqlparser::ast::helpers::attached_token::AttachedToken;
 use sqlparser::ast::*;
 use sqlparser::dialect::{DatabricksDialect, GenericDialect};
 use sqlparser::parser::ParserError;
+use sqlparser::tokenizer::Span;
 use test_utils::*;

 #[macro_use]
@@ -213,7 +214,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        databricks().verified_stmt(&format!("USE {}", object_name)),
+        databricks().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -221,7 +222,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+        databricks().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),
@@ -233,21 +234,21 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with keyword and different type of quotes
     assert_eq!(
-        databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)),
+        databricks().verified_stmt(&format!("USE CATALOG {quote}my_catalog{quote}")),
         Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote(
             quote,
             "my_catalog".to_string(),
         )])))
     );
     assert_eq!(
-        databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
+        databricks().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
         Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
             quote,
             "my_database".to_string(),
         )])))
     );
     assert_eq!(
-        databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
+        databricks().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
         Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
             quote,
             "my_schema".to_string(),
@@ -328,7 +329,10 @@ fn data_type_timestamp_ntz() {
     databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"),
     Expr::TypedString {
         data_type: DataType::TimestampNtz,
-        value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned())
+        value: ValueWithSpan {
+            value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()),
+            span: Span::empty(),
+        }
     }
 );

@@ -357,6 +361,6 @@ fn data_type_timestamp_ntz() {
     }]
 );
 }
-s => panic!("Unexpected statement: {:?}", s),
+s => panic!("Unexpected statement: {s:?}"),
 }
 }
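The Databricks hunk above reflects the AST change from a bare Value to a ValueWithSpan inside Expr::TypedString. A minimal sketch, grounded in the struct fields shown in the diff, of reading the literal back out while ignoring its source span:

use sqlparser::ast::{Expr, Value};

// Returns the inner literal of a typed string expression, if any.
fn typed_string_literal(expr: &Expr) -> Option<&Value> {
    match expr {
        Expr::TypedString { value, .. } => Some(&value.value),
        _ => None,
    }
}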
@@ -24,6 +24,7 @@ use test_utils::*;

 use sqlparser::ast::*;
 use sqlparser::dialect::{DuckDbDialect, GenericDialect};
+use sqlparser::parser::ParserError;

 fn duckdb() -> TestedDialects {
     TestedDialects::new(vec![Box::new(DuckDbDialect {})])
@@ -44,10 +45,12 @@ fn test_struct() {
     StructField {
         field_name: Some(Ident::new("v")),
         field_type: DataType::Varchar(None),
+        options: None,
     },
     StructField {
         field_name: Some(Ident::new("i")),
         field_type: DataType::Integer(None),
+        options: None,
     },
 ],
 StructBracketKind::Parentheses,
@@ -84,6 +87,7 @@ fn test_struct() {
     StructField {
         field_name: Some(Ident::new("v")),
         field_type: DataType::Varchar(None),
+        options: None,
     },
     StructField {
         field_name: Some(Ident::new("s")),
@@ -92,14 +96,17 @@ fn test_struct() {
     StructField {
         field_name: Some(Ident::new("a1")),
         field_type: DataType::Integer(None),
+        options: None,
     },
     StructField {
         field_name: Some(Ident::new("a2")),
         field_type: DataType::Varchar(None),
+        options: None,
     },
 ],
 StructBracketKind::Parentheses,
 ),
+options: None,
 },
 ],
 StructBracketKind::Parentheses,
@@ -362,7 +369,7 @@ fn test_duckdb_specific_int_types() {
     ("HUGEINT", DataType::HugeInt),
 ];
 for (dtype_string, data_type) in duckdb_dtypes {
-    let sql = format!("SELECT 123::{}", dtype_string);
+    let sql = format!("SELECT 123::{dtype_string}");
     let select = duckdb().verified_only_select(&sql);
     assert_eq!(
         &Expr::Cast {
@@ -786,7 +793,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        duckdb().verified_stmt(&format!("USE {}", object_name)),
+        duckdb().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -794,7 +801,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)),
+        duckdb().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),
@@ -806,7 +813,9 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test double identifier with different type of quotes
     assert_eq!(
-        duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
+        duckdb().verified_stmt(&format!(
+            "USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
+        )),
         Statement::Use(Use::Object(ObjectName::from(vec![
             Ident::with_quote(quote, "CATALOG"),
             Ident::with_quote(quote, "my_schema")
@@ -822,3 +831,32 @@ fn parse_use() {
         ])))
     );
 }
+
+#[test]
+fn test_duckdb_trim() {
+    let real_sql = r#"SELECT customer_id, TRIM(item_price_id, '"', "a") AS item_price_id FROM models_staging.subscriptions"#;
+    assert_eq!(duckdb().verified_stmt(real_sql).to_string(), real_sql);
+
+    let sql_only_select = "SELECT TRIM('xyz', 'a')";
+    let select = duckdb().verified_only_select(sql_only_select);
+    assert_eq!(
+        &Expr::Trim {
+            expr: Box::new(Expr::Value(
+                Value::SingleQuotedString("xyz".to_owned()).with_empty_span()
+            )),
+            trim_where: None,
+            trim_what: None,
+            trim_characters: Some(vec![Expr::Value(
+                Value::SingleQuotedString("a".to_owned()).with_empty_span()
+            )]),
+        },
+        expr_from_projection(only(&select.projection))
+    );
+
+    // missing comma separation
+    let error_sql = "SELECT TRIM('xyz' 'a')";
+    assert_eq!(
+        ParserError::ParserError("Expected: ), found: 'a'".to_owned()),
+        duckdb().parse_sql_statements(error_sql).unwrap_err()
+    );
+}
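The new test_duckdb_trim test covers DuckDB's TRIM(string, characters) form. A minimal sketch using only the public parser API, with SQL taken from the test:

use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT TRIM('xyz', 'a')";
    // verified_stmt in the test asserts the same round-trip shown here.
    let statements = Parser::parse_sql(&DuckDbDialect {}, sql).expect("DuckDB TRIM should parse");
    assert_eq!(statements[0].to_string(), sql);
}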
@@ -524,7 +524,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        hive().verified_stmt(&format!("USE {}", object_name)),
+        hive().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -532,7 +532,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+        hive().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),

@@ -153,7 +153,8 @@ fn parse_create_procedure() {
     quote_style: None,
     span: Span::empty(),
 },
-data_type: DataType::Int(None)
+data_type: DataType::Int(None),
+mode: None,
 },
 ProcedureParam {
     name: Ident {
@@ -164,14 +165,16 @@ fn parse_create_procedure() {
     data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
         length: 256,
         unit: None
-    }))
+    })),
+    mode: None,
 }
 ]),
 name: ObjectName::from(vec![Ident {
     value: "test".into(),
     quote_style: None,
     span: Span::empty(),
-}])
+}]),
+language: None,
 }
 )
 }
@@ -1670,7 +1673,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        ms().verified_stmt(&format!("USE {}", object_name)),
+        ms().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -1678,7 +1681,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+        ms().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),
@@ -2184,7 +2187,7 @@ fn parse_mssql_if_else() {
         "IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"
     );
 }
-_ => panic!("Unexpected statements: {:?}", stmts),
+_ => panic!("Unexpected statements: {stmts:?}"),
 }
 }

@@ -2234,7 +2237,7 @@ fn test_mssql_if_statements_span() {
     Span::new(Location::new(1, 21), Location::new(1, 36))
 );
 }
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Blocks
@@ -2255,7 +2258,7 @@ fn test_mssql_if_statements_span() {
     Span::new(Location::new(1, 32), Location::new(1, 57))
 );
 }
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }
 }
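The MSSQL hunks above add a mode field to ProcedureParam and a language field to the CREATE PROCEDURE AST. A minimal sketch using only the public parser API; the SQL text here is taken verbatim from the parse_mssql_if_else context shown above, and the error branch is included so the snippet runs regardless of dialect support:

use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // T-SQL IF/ELSE statement from the test context above.
    let sql = "IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;";
    match Parser::parse_sql(&MsSqlDialect {}, sql) {
        Ok(statements) => println!("{statements:?}"),
        Err(e) => eprintln!("parse error: {e}"),
    }
}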
@@ -593,7 +593,7 @@ fn parse_use() {
 for object_name in &valid_object_names {
     // Test single identifier without quotes
     assert_eq!(
-        mysql_and_generic().verified_stmt(&format!("USE {}", object_name)),
+        mysql_and_generic().verified_stmt(&format!("USE {object_name}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
             object_name.to_string()
         )])))
@@ -601,8 +601,7 @@ fn parse_use() {
 for &quote in &quote_styles {
     // Test single identifier with different type of quotes
     assert_eq!(
-        mysql_and_generic()
-            .verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+        mysql_and_generic().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
         Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
             quote,
             object_name.to_string(),
@@ -670,6 +669,20 @@ fn table_constraint_unique_primary_ctor(
     characteristics: Option<ConstraintCharacteristics>,
     unique_index_type_display: Option<KeyOrIndexDisplay>,
 ) -> TableConstraint {
+    let columns = columns
+        .into_iter()
+        .map(|ident| IndexColumn {
+            column: OrderByExpr {
+                expr: Expr::Identifier(ident),
+                options: OrderByOptions {
+                    asc: None,
+                    nulls_first: None,
+                },
+                with_fill: None,
+            },
+            operator_class: None,
+        })
+        .collect();
     match unique_index_type_display {
         Some(index_type_display) => TableConstraint::Unique {
             name,
@@ -795,6 +808,67 @@ fn parse_create_table_primary_and_unique_key_with_index_options() {
     }
 }
+
+#[test]
+fn parse_prefix_key_part() {
+    let expected = vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::value(
+        number("10"),
+    )))];
+    for sql in [
+        "CREATE INDEX idx_index ON t(textcol(10))",
+        "ALTER TABLE tab ADD INDEX idx_index (textcol(10))",
+        "ALTER TABLE tab ADD PRIMARY KEY (textcol(10))",
+        "ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
+        "ALTER TABLE tab ADD UNIQUE KEY (textcol(10))",
+        "ALTER TABLE tab ADD FULLTEXT INDEX (textcol(10))",
+        "CREATE TABLE t (textcol TEXT, INDEX idx_index (textcol(10)))",
+    ] {
+        match index_column(mysql_and_generic().verified_stmt(sql)) {
+            Expr::Function(Function {
+                name,
+                args: FunctionArguments::List(FunctionArgumentList { args, .. }),
+                ..
+            }) => {
+                assert_eq!(name.to_string(), "textcol");
+                assert_eq!(args, expected);
+            }
+            expr => panic!("unexpected expression {expr} for {sql}"),
+        }
+    }
+}
+
+#[test]
+fn test_functional_key_part() {
+    assert_eq!(
+        index_column(
+            mysql_and_generic()
+                .verified_stmt("CREATE INDEX idx_index ON t((col COLLATE utf8mb4_bin) DESC)")
+        ),
+        Expr::Nested(Box::new(Expr::Collate {
+            expr: Box::new(Expr::Identifier("col".into())),
+            collation: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier(
+                Ident::new("utf8mb4_bin")
+            )]),
+        }))
+    );
+    assert_eq!(
+        index_column(mysql_and_generic().verified_stmt(
+            r#"CREATE TABLE t (jsoncol JSON, PRIMARY KEY ((CAST(col ->> '$.id' AS UNSIGNED)) ASC))"#
+        )),
+        Expr::Nested(Box::new(Expr::Cast {
+            kind: CastKind::Cast,
+            expr: Box::new(Expr::BinaryOp {
+                left: Box::new(Expr::Identifier(Ident::new("col"))),
+                op: BinaryOperator::LongArrow,
+                right: Box::new(Expr::Value(
+                    Value::SingleQuotedString("$.id".to_string()).with_empty_span()
+                )),
+            }),
+            data_type: DataType::Unsigned,
+            format: None,
+        })),
+    );
+}
+
 #[test]
 fn parse_create_table_primary_and_unique_key_with_index_type() {
     let sqls = ["UNIQUE", "PRIMARY KEY"].map(|key_ty| {
@@ -2188,11 +2262,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
     Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
         assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]);
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 2: Qualified column name that starts with digits and on its own represents a number.
@@ -2202,11 +2276,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
     Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => {
         assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]);
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 3: Unqualified, the same token is parsed as a number.
@@ -2220,11 +2294,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
     Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => {
         assert_eq!(&number("15e29"), value);
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 4: Quoted simple identifier.
@@ -2234,11 +2308,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
     Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => {
         assert_eq!(&Ident::with_quote('`', "15e29"), name);
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 5: Quoted compound identifier.
@@ -2251,11 +2325,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
         &parts[..]
     );
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 6: Multi-level compound identifiers.
@@ -2272,11 +2346,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
         &parts[..]
     );
     }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }

 // Case 7: Multi-level compound quoted identifiers.
@@ -2293,11 +2367,11 @@ fn parse_qualified_identifiers_with_numeric_prefix() {
         &parts[..]
    );
    }
-    proj => panic!("Unexpected projection: {:?}", proj),
+    proj => panic!("Unexpected projection: {proj:?}"),
 },
-body => panic!("Unexpected statement body: {:?}", body),
+body => panic!("Unexpected statement body: {body:?}"),
 },
-stmt => panic!("Unexpected statement: {:?}", stmt),
+stmt => panic!("Unexpected statement: {stmt:?}"),
 }
 }

@@ -2802,7 +2876,7 @@ fn parse_alter_table_with_algorithm() {
 vec![
     AlterTableOperation::DropColumn {
         has_column_keyword: true,
-        column_name: Ident::new("password_digest"),
+        column_names: vec![Ident::new("password_digest")],
         if_exists: false,
         drop_behavior: None,
     },
@@ -2850,7 +2924,7 @@ fn parse_alter_table_with_lock() {
 vec![
     AlterTableOperation::DropColumn {
         has_column_keyword: true,
-        column_name: Ident::new("password_digest"),
+        column_names: vec![Ident::new("password_digest")],
         if_exists: false,
         drop_behavior: None,
     },
@@ -3542,6 +3616,7 @@ fn parse_grant() {
     with_grant_option,
     as_grantor: _,
     granted_by,
+    current_grants: _,
 } = stmt
 {
     assert_eq!(
@@ -4035,3 +4110,28 @@ fn parse_alter_table_drop_index() {
     AlterTableOperation::DropIndex { name } if name.value == "idx_index"
 );
 }
+
+#[test]
+fn parse_json_member_of() {
+    mysql().verified_stmt(r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#);
+    let sql = r#"SELECT 'ab' MEMBER OF('[23, "abc", 17, "ab", 10]')"#;
+    let stmt = mysql().verified_stmt(sql);
+    match stmt {
+        Statement::Query(query) => {
+            let select = query.body.as_select().unwrap();
+            assert_eq!(
+                select.projection,
+                vec![SelectItem::UnnamedExpr(Expr::MemberOf(MemberOf {
+                    value: Box::new(Expr::Value(
+                        Value::SingleQuotedString("ab".to_string()).into()
+                    )),
+                    array: Box::new(Expr::Value(
+                        Value::SingleQuotedString(r#"[23, "abc", 17, "ab", 10]"#.to_string())
+                            .into()
+                    )),
+                }))]
+            );
+        }
+        _ => panic!("Unexpected statement {stmt}"),
+    }
+}
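The new MySQL tests above add prefix key parts, functional key parts, and the JSON MEMBER OF() predicate. A minimal sketch using only the public parser API, with SQL taken from those tests (both statements round-trip in the tests, so the assertion below should hold):

use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        "CREATE INDEX idx_index ON t(textcol(10))",
        r#"SELECT 17 MEMBER OF('[23, "abc", 17, "ab", 10]')"#,
    ] {
        let statements = Parser::parse_sql(&MySqlDialect {}, sql).expect("MySQL syntax from the tests above");
        assert_eq!(statements[0].to_string(), sql);
    }
}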
@@ -606,9 +606,10 @@ fn parse_alter_table_constraints_unique_nulls_distinct() {
     .verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)")
 {
     Statement::AlterTable { operations, .. } => match &operations[0] {
-        AlterTableOperation::AddConstraint(TableConstraint::Unique {
-            nulls_distinct, ..
-        }) => {
+        AlterTableOperation::AddConstraint {
+            constraint: TableConstraint::Unique { nulls_distinct, .. },
+            ..
+        } => {
             assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct)
         }
         _ => unreachable!(),
@@ -764,10 +765,7 @@ fn parse_drop_extension() {

 #[test]
 fn parse_alter_table_alter_column() {
-    pg().one_statement_parses_to(
-        "ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'",
-        "ALTER TABLE tab ALTER COLUMN is_active SET DATA TYPE TEXT USING 'text'",
-    );
+    pg().verified_stmt("ALTER TABLE tab ALTER COLUMN is_active TYPE TEXT USING 'text'");

     match alter_table_op(
         pg().verified_stmt(
@@ -783,6 +781,7 @@ fn parse_alter_table_alter_column() {
     AlterColumnOperation::SetDataType {
         data_type: DataType::Text,
         using: Some(using_expr),
+        had_set: true,
     }
 );
 }
@@ -2535,12 +2534,12 @@ fn parse_create_indices_with_operator_classes() {
 for expected_operator_class in &operator_classes {
     let single_column_sql_statement = format!(
         "CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})",
-        expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
+        expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
             .unwrap_or_default()
     );
     let multi_column_sql_statement = format!(
         "CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})",
-        expected_operator_class.as_ref().map(|oc| format!(" {}", oc))
+        expected_operator_class.as_ref().map(|oc| format!(" {oc}"))
             .unwrap_or_default()
     );

@@ -3273,7 +3272,7 @@ fn test_fn_arg_with_value_operator() {
 assert!(matches!(
     &args[..],
     &[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }]
-), "Invalid function argument: {:?}", args);
+), "Invalid function argument: {args:?}");
 }
 other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"),
 }
@@ -5258,7 +5257,10 @@ fn parse_at_time_zone() {
 left: Box::new(Expr::AtTimeZone {
     timestamp: Box::new(Expr::TypedString {
         data_type: DataType::Timestamp(None, TimezoneInfo::None),
-        value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
+        value: ValueWithSpan {
+            value: Value::SingleQuotedString("2001-09-28 01:00".to_string()),
+            span: Span::empty(),
+        },
     }),
     time_zone: Box::new(Expr::Cast {
         kind: CastKind::DoubleColon,
@@ -5679,7 +5681,7 @@ fn parse_drop_trigger() {
     "DROP TRIGGER{} check_update ON table_name{}",
     if if_exists { " IF EXISTS" } else { "" },
     option
-        .map(|o| format!(" {}", o))
+        .map(|o| format!(" {o}"))
         .unwrap_or_else(|| "".to_string())
 );
 assert_eq!(
@@ -5773,8 +5775,7 @@ fn parse_trigger_related_functions() {
 // Now we parse the statements and check if they are parsed correctly.
 let mut statements = pg()
     .parse_sql_statements(&format!(
-        "{}{}{}{}",
-        sql_table_creation, sql_create_function, sql_create_trigger, sql_drop_trigger
+        "{sql_table_creation}{sql_create_function}{sql_create_trigger}{sql_drop_trigger}"
     ))
     .unwrap();

@@ -6201,3 +6202,153 @@ fn parse_alter_table_replica_identity() {
     _ => unreachable!(),
 }
 }
+
+#[test]
+fn parse_ts_datatypes() {
+    match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSVECTOR)") {
+        Statement::CreateTable(CreateTable { columns, .. }) => {
+            assert_eq!(
+                columns,
+                vec![ColumnDef {
+                    name: "x".into(),
+                    data_type: DataType::TsVector,
+                    options: vec![],
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+
+    match pg_and_generic().verified_stmt("CREATE TABLE foo (x TSQUERY)") {
+        Statement::CreateTable(CreateTable { columns, .. }) => {
+            assert_eq!(
+                columns,
+                vec![ColumnDef {
+                    name: "x".into(),
+                    data_type: DataType::TsQuery,
+                    options: vec![],
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_alter_table_constraint_not_valid() {
+    match pg_and_generic().verified_stmt(
+        "ALTER TABLE foo ADD CONSTRAINT bar FOREIGN KEY (baz) REFERENCES other(ref) NOT VALID",
+    ) {
+        Statement::AlterTable { operations, .. } => {
+            assert_eq!(
+                operations,
+                vec![AlterTableOperation::AddConstraint {
+                    constraint: TableConstraint::ForeignKey {
+                        name: Some("bar".into()),
+                        index_name: None,
+                        columns: vec!["baz".into()],
+                        foreign_table: ObjectName::from(vec!["other".into()]),
+                        referred_columns: vec!["ref".into()],
+                        on_delete: None,
+                        on_update: None,
+                        characteristics: None,
+                    },
+                    not_valid: true,
+                }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_alter_table_validate_constraint() {
+    match pg_and_generic().verified_stmt("ALTER TABLE foo VALIDATE CONSTRAINT bar") {
+        Statement::AlterTable { operations, .. } => {
+            assert_eq!(
+                operations,
+                vec![AlterTableOperation::ValidateConstraint { name: "bar".into() }]
+            );
+        }
+        _ => unreachable!(),
+    }
+}
+
+#[test]
+fn parse_create_server() {
+    let test_cases = vec![
+        (
+            "CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver".into()]),
+                if_not_exists: false,
+                server_type: None,
+                version: None,
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: None,
+            },
+        ),
+        (
+            "CREATE SERVER IF NOT EXISTS myserver TYPE 'server_type' VERSION 'server_version' FOREIGN DATA WRAPPER postgres_fdw",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver".into()]),
+                if_not_exists: true,
+                server_type: Some(Ident {
+                    value: "server_type".to_string(),
+                    quote_style: Some('\''),
+                    span: Span::empty(),
+                }),
+                version: Some(Ident {
+                    value: "server_version".to_string(),
+                    quote_style: Some('\''),
+                    span: Span::empty(),
+                }),
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: None,
+            }
+        ),
+        (
+            "CREATE SERVER myserver2 FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432')",
+            CreateServerStatement {
+                name: ObjectName::from(vec!["myserver2".into()]),
+                if_not_exists: false,
+                server_type: None,
+                version: None,
+                foreign_data_wrapper: ObjectName::from(vec!["postgres_fdw".into()]),
+                options: Some(vec![
+                    CreateServerOption {
+                        key: "host".into(),
+                        value: Ident {
+                            value: "foo".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                    CreateServerOption {
+                        key: "dbname".into(),
+                        value: Ident {
+                            value: "foodb".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                    CreateServerOption {
+                        key: "port".into(),
+                        value: Ident {
+                            value: "5432".to_string(),
+                            quote_style: Some('\''),
+                            span: Span::empty(),
+                        },
+                    },
+                ]),
+            }
+        )
+    ];
+
+    for (sql, expected) in test_cases {
+        let Statement::CreateServer(stmt) = pg_and_generic().verified_stmt(sql) else {
+            unreachable!()
+        };
+        assert_eq!(stmt, expected);
+    }
+}
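The added PostgreSQL tests cover TSVECTOR/TSQUERY columns, NOT VALID foreign keys, VALIDATE CONSTRAINT, and CREATE SERVER. A minimal sketch using only the public parser API, with SQL taken from those tests:

use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        "CREATE TABLE foo (x TSVECTOR)",
        "ALTER TABLE foo VALIDATE CONSTRAINT bar",
        "CREATE SERVER myserver FOREIGN DATA WRAPPER postgres_fdw",
    ] {
        Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("PostgreSQL syntax from the tests above");
    }
}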
@@ -402,3 +402,8 @@ fn parse_extract_single_quotes() {
 fn parse_string_literal_backslash_escape() {
     redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'");
 }
+
+#[test]
+fn parse_utf8_multibyte_idents() {
+    redshift().verified_stmt("SELECT 🚀.city AS 🎸 FROM customers AS 🚀");
+}
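The new Redshift test accepts multibyte UTF-8 identifiers. A minimal sketch with the same statement, assuming only the public parser API:

use sqlparser::dialect::RedshiftSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT 🚀.city AS 🎸 FROM customers AS 🚀";
    let statements = Parser::parse_sql(&RedshiftSqlDialect {}, sql).expect("multibyte identifiers should parse");
    assert_eq!(statements[0].to_string(), sql);
}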
|
|
@ -270,8 +270,8 @@ fn test_snowflake_create_table_with_tag() {
|
||||||
assert_eq!("my_table", name.to_string());
|
assert_eq!("my_table", name.to_string());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
Some(vec![
|
Some(vec![
|
||||||
Tag::new("A".into(), "TAG A".to_string()),
|
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
|
||||||
Tag::new("B".into(), "TAG B".to_string())
|
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
|
||||||
]),
|
]),
|
||||||
with_tags
|
with_tags
|
||||||
);
|
);
|
||||||
|
@ -291,8 +291,8 @@ fn test_snowflake_create_table_with_tag() {
|
||||||
assert_eq!("my_table", name.to_string());
|
assert_eq!("my_table", name.to_string());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
Some(vec![
|
Some(vec![
|
||||||
Tag::new("A".into(), "TAG A".to_string()),
|
Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string()),
|
||||||
Tag::new("B".into(), "TAG B".to_string())
|
Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".to_string())
|
||||||
]),
|
]),
|
||||||
with_tags
|
with_tags
|
||||||
);
|
);
|
||||||
|
@ -731,7 +731,7 @@ fn test_snowflake_create_table_with_columns_masking_policy() {
|
||||||
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
|
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
|
||||||
ColumnPolicyProperty {
|
ColumnPolicyProperty {
|
||||||
with,
|
with,
|
||||||
policy_name: "p".into(),
|
policy_name: ObjectName::from(vec![Ident::new("p")]),
|
||||||
using_columns,
|
using_columns,
|
||||||
}
|
}
|
||||||
))
|
))
|
||||||
|
@ -765,7 +765,7 @@ fn test_snowflake_create_table_with_columns_projection_policy() {
|
||||||
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
|
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
|
||||||
ColumnPolicyProperty {
|
ColumnPolicyProperty {
|
||||||
with,
|
with,
|
||||||
policy_name: "p".into(),
|
policy_name: ObjectName::from(vec![Ident::new("p")]),
|
||||||
using_columns: None,
|
using_columns: None,
|
||||||
}
|
}
|
||||||
))
|
))
|
||||||
|
@ -802,8 +802,14 @@ fn test_snowflake_create_table_with_columns_tags() {
|
||||||
option: ColumnOption::Tags(TagsColumnOption {
|
option: ColumnOption::Tags(TagsColumnOption {
|
||||||
with,
|
with,
|
||||||
tags: vec![
|
tags: vec![
|
||||||
Tag::new("A".into(), "TAG A".into()),
|
Tag::new(
|
||||||
Tag::new("B".into(), "TAG B".into()),
|
ObjectName::from(vec![Ident::new("A")]),
|
||||||
|
"TAG A".into()
|
||||||
|
),
|
||||||
|
Tag::new(
|
||||||
|
ObjectName::from(vec![Ident::new("B")]),
|
||||||
|
"TAG B".into()
|
||||||
|
),
|
||||||
]
|
]
|
||||||
}),
|
}),
|
||||||
}],
|
}],
|
||||||
|
@ -846,7 +852,7 @@ fn test_snowflake_create_table_with_several_column_options() {
|
||||||
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
|
option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy(
|
||||||
ColumnPolicyProperty {
|
ColumnPolicyProperty {
|
||||||
with: true,
|
with: true,
|
||||||
policy_name: "p1".into(),
|
policy_name: ObjectName::from(vec![Ident::new("p1")]),
|
||||||
using_columns: Some(vec!["a".into(), "b".into()]),
|
using_columns: Some(vec!["a".into(), "b".into()]),
|
||||||
}
|
}
|
||||||
)),
|
)),
|
||||||
|
@ -856,8 +862,14 @@ fn test_snowflake_create_table_with_several_column_options() {
|
||||||
option: ColumnOption::Tags(TagsColumnOption {
|
option: ColumnOption::Tags(TagsColumnOption {
|
||||||
with: true,
|
with: true,
|
||||||
tags: vec![
|
tags: vec![
|
||||||
Tag::new("A".into(), "TAG A".into()),
|
Tag::new(
|
||||||
Tag::new("B".into(), "TAG B".into()),
|
ObjectName::from(vec![Ident::new("A")]),
|
||||||
|
"TAG A".into()
|
||||||
|
),
|
||||||
|
Tag::new(
|
||||||
|
ObjectName::from(vec![Ident::new("B")]),
|
||||||
|
"TAG B".into()
|
||||||
|
),
|
||||||
]
|
]
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
|
@ -878,7 +890,7 @@ fn test_snowflake_create_table_with_several_column_options() {
|
||||||
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
|
option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy(
|
||||||
ColumnPolicyProperty {
|
ColumnPolicyProperty {
|
||||||
with: false,
|
with: false,
|
||||||
policy_name: "p2".into(),
|
policy_name: ObjectName::from(vec![Ident::new("p2")]),
|
||||||
using_columns: None,
|
using_columns: None,
|
||||||
}
|
}
|
||||||
)),
|
)),
|
||||||
@@ -888,8 +900,14 @@ fn test_snowflake_create_table_with_several_column_options() {
                     option: ColumnOption::Tags(TagsColumnOption {
                         with: false,
                         tags: vec![
-                            Tag::new("C".into(), "TAG C".into()),
-                            Tag::new("D".into(), "TAG D".into()),
+                            Tag::new(
+                                ObjectName::from(vec![Ident::new("C")]),
+                                "TAG C".into()
+                            ),
+                            Tag::new(
+                                ObjectName::from(vec![Ident::new("D")]),
+                                "TAG D".into()
+                            ),
                         ]
                     }),
                 }
@@ -942,8 +960,8 @@ fn test_snowflake_create_iceberg_table_all_options() {
         with_aggregation_policy.map(|name| name.to_string())
     );
     assert_eq!(Some(vec![
-        Tag::new("A".into(), "TAG A".into()),
-        Tag::new("B".into(), "TAG B".into()),
+        Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".into()),
+        Tag::new(ObjectName::from(vec![Ident::new("B")]), "TAG B".into()),
     ]), with_tags);

 }
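The hunks above change Tag keys and masking/projection policy names from plain strings into ObjectName values, so schema-qualified names can be represented. Below is a minimal usage sketch (not part of the diff), assuming the sqlparser AST types exercised by these tests and the glob import the test files themselves use:

    use sqlparser::ast::*;

    fn main() {
        // A tag key is now an ObjectName built from Idents rather than a bare string.
        let tag = Tag::new(ObjectName::from(vec![Ident::new("A")]), "TAG A".to_string());
        // Policy names follow the same pattern and may be multi-part.
        let policy_name = ObjectName::from(vec![Ident::new("db"), Ident::new("s"), Ident::new("p")]);
        println!("{tag} {policy_name}");
    }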
@@ -2510,10 +2528,7 @@ fn test_snowflake_stage_object_names_into_location() {
         .zip(allowed_object_names.iter_mut())
     {
         let (formatted_name, object_name) = it;
-        let sql = format!(
-            "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
-            formatted_name
-        );
+        let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
         match snowflake().verified_stmt(&sql) {
             Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
@@ -2536,10 +2551,7 @@ fn test_snowflake_stage_object_names_into_table() {
         .zip(allowed_object_names.iter_mut())
     {
         let (formatted_name, object_name) = it;
-        let sql = format!(
-            "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'",
-            formatted_name
-        );
+        let sql = format!("COPY INTO {formatted_name} FROM 'gcs://mybucket/./../a.csv'");
         match snowflake().verified_stmt(&sql) {
             Statement::CopyIntoSnowflake { into, .. } => {
                 assert_eq!(into.0, object_name.0)
@@ -3020,7 +3032,7 @@ fn parse_use() {
     for object_name in &valid_object_names {
         // Test single identifier without quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {}", object_name)),
+            snowflake().verified_stmt(&format!("USE {object_name}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::new(
                 object_name.to_string()
             )])))
@@ -3028,7 +3040,7 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test single identifier with different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)),
+            snowflake().verified_stmt(&format!("USE {quote}{object_name}{quote}")),
             Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 object_name.to_string(),
@@ -3040,7 +3052,9 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test double identifier with different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!(
+                "USE {quote}CATALOG{quote}.{quote}my_schema{quote}"
+            )),
             Statement::Use(Use::Object(ObjectName::from(vec![
                 Ident::with_quote(quote, "CATALOG"),
                 Ident::with_quote(quote, "my_schema")
@@ -3059,35 +3073,37 @@ fn parse_use() {
     for &quote in &quote_styles {
         // Test single and double identifier with keyword and different type of quotes
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)),
+            snowflake().verified_stmt(&format!("USE DATABASE {quote}my_database{quote}")),
             Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_database".to_string(),
             )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!("USE SCHEMA {quote}my_schema{quote}")),
             Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_schema".to_string(),
             )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)),
+            snowflake().verified_stmt(&format!(
+                "USE SCHEMA {quote}CATALOG{quote}.{quote}my_schema{quote}"
+            )),
             Statement::Use(Use::Schema(ObjectName::from(vec![
                 Ident::with_quote(quote, "CATALOG"),
                 Ident::with_quote(quote, "my_schema")
             ])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)),
+            snowflake().verified_stmt(&format!("USE ROLE {quote}my_role{quote}")),
             Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_role".to_string(),
             )])))
         );
         assert_eq!(
-            snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)),
+            snowflake().verified_stmt(&format!("USE WAREHOUSE {quote}my_wh{quote}")),
             Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote(
                 quote,
                 "my_wh".to_string(),
@@ -3124,7 +3140,7 @@ fn view_comment_option_should_be_after_column_list() {
         "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') COMMENT = 'Comment' AS SELECT a FROM t",
         "CREATE OR REPLACE VIEW v (a COMMENT 'a comment', b, c COMMENT 'c comment') WITH (foo = bar) COMMENT = 'Comment' AS SELECT a FROM t",
     ] {
-        snowflake_and_generic()
+        snowflake()
             .verified_stmt(sql);
     }
 }
@@ -3133,7 +3149,7 @@ fn view_comment_option_should_be_after_column_list() {
 fn parse_view_column_descriptions() {
     let sql = "CREATE OR REPLACE VIEW v (a COMMENT 'Comment', b) AS SELECT a, b FROM table1";

-    match snowflake_and_generic().verified_stmt(sql) {
+    match snowflake().verified_stmt(sql) {
         Statement::CreateView { name, columns, .. } => {
             assert_eq!(name.to_string(), "v");
             assert_eq!(
@@ -3142,7 +3158,9 @@ fn parse_view_column_descriptions() {
                 ViewColumnDef {
                     name: Ident::new("a"),
                     data_type: None,
-                    options: Some(vec![ColumnOption::Comment("Comment".to_string())]),
+                    options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
+                        "Comment".to_string()
+                    )])),
                 },
                 ViewColumnDef {
                     name: Ident::new("b"),
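In the parse_view_column_descriptions hunk above, ViewColumnDef::options changes from a bare Vec of ColumnOption into a ColumnOptions wrapper that records how the options are separated. A small sketch of building such a column under that assumption (names follow the diff, not a verified API listing):

    use sqlparser::ast::*;

    fn main() {
        // View column options are now wrapped in ColumnOptions::SpaceSeparated,
        // matching the updated test expectations.
        let column = ViewColumnDef {
            name: Ident::new("a"),
            data_type: None,
            options: Some(ColumnOptions::SpaceSeparated(vec![ColumnOption::Comment(
                "Comment".to_string(),
            )])),
        };
        println!("{column}");
    }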
@@ -3397,10 +3415,38 @@ fn parse_ls_and_rm() {
         .unwrap();
 }

+#[test]
+fn test_sql_keywords_as_select_item_ident() {
+    // Some keywords that should be parsed as an alias
+    let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
+    for kw in unreserved_kws {
+        snowflake().verified_stmt(&format!("SELECT 1, {kw}"));
+    }
+
+    // Some keywords that should not be parsed as an alias
+    let reserved_kws = vec![
+        "FROM",
+        "GROUP",
+        "HAVING",
+        "INTERSECT",
+        "INTO",
+        "ORDER",
+        "SELECT",
+        "UNION",
+        "WHERE",
+        "WITH",
+    ];
+    for kw in reserved_kws {
+        assert!(snowflake()
+            .parse_sql_statements(&format!("SELECT 1, {kw}"))
+            .is_err());
+    }
+}
+
 #[test]
 fn test_sql_keywords_as_select_item_aliases() {
     // Some keywords that should be parsed as an alias
-    let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT"];
+    let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT", "SORT"];
     for kw in unreserved_kws {
         snowflake()
             .one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}"));
@@ -3627,7 +3673,7 @@ fn test_alter_session_followed_by_statement() {
         .unwrap();
     match stmts[..] {
         [Statement::AlterSession { .. }, Statement::Query { .. }] => {}
-        _ => panic!("Unexpected statements: {:?}", stmts),
+        _ => panic!("Unexpected statements: {stmts:?}"),
     }
 }

@@ -4082,3 +4128,107 @@ fn parse_connect_by_root_operator() {
         "sql parser error: Expected an expression, found: FROM"
     );
 }
+
+#[test]
+fn test_begin_exception_end() {
+    for sql in [
+        "BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END",
+        "BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE EX_1; END",
+        "BEGIN SELECT 1; EXCEPTION WHEN FOO THEN SELECT 2; WHEN OTHER THEN SELECT 3; RAISE; END",
+        "BEGIN BEGIN SELECT 1; EXCEPTION WHEN OTHER THEN SELECT 2; RAISE; END; END",
+    ] {
+        snowflake().verified_stmt(sql);
+    }
+
+    let sql = r#"
+DECLARE
+    EXCEPTION_1 EXCEPTION (-20001, 'I caught the expected exception.');
+    EXCEPTION_2 EXCEPTION (-20002, 'Not the expected exception!');
+    EXCEPTION_3 EXCEPTION (-20003, 'The worst exception...');
+BEGIN
+    BEGIN
+        SELECT 1;
+    EXCEPTION
+        WHEN EXCEPTION_1 THEN
+            SELECT 1;
+        WHEN EXCEPTION_2 OR EXCEPTION_3 THEN
+            SELECT 2;
+            SELECT 3;
+        WHEN OTHER THEN
+            SELECT 4;
+            RAISE;
+    END;
+END
+"#;
+
+    // Outer `BEGIN` of the two nested `BEGIN` statements.
+    let Statement::StartTransaction { mut statements, .. } = snowflake()
+        .parse_sql_statements(sql)
+        .unwrap()
+        .pop()
+        .unwrap()
+    else {
+        unreachable!();
+    };
+
+    // Inner `BEGIN` of the two nested `BEGIN` statements.
+    let Statement::StartTransaction {
+        statements,
+        exception,
+        has_end_keyword,
+        ..
+    } = statements.pop().unwrap()
+    else {
+        unreachable!();
+    };
+
+    assert_eq!(1, statements.len());
+    assert!(has_end_keyword);
+
+    let exception = exception.unwrap();
+    assert_eq!(3, exception.len());
+    assert_eq!(1, exception[0].idents.len());
+    assert_eq!(1, exception[0].statements.len());
+    assert_eq!(2, exception[1].idents.len());
+    assert_eq!(2, exception[1].statements.len());
+}
+
+#[test]
+fn test_snowflake_fetch_clause_syntax() {
+    let canonical = "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS ONLY";
+    snowflake().verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2", canonical);
+
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH FIRST 2", canonical);
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH NEXT 2", canonical);
+
+    snowflake()
+        .verified_only_select_with_canonical("SELECT c1 FROM fetch_test FETCH 2 ROW", canonical);
+
+    snowflake().verified_only_select_with_canonical(
+        "SELECT c1 FROM fetch_test FETCH FIRST 2 ROWS",
+        canonical,
+    );
+}
+
+#[test]
+fn test_snowflake_create_view_with_multiple_column_options() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH TAG (pii='email') COMMENT 'foobar') AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
+
+#[test]
+fn test_snowflake_create_view_with_composite_tag() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH TAG (foo.bar.baz.pii='email')) AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
+
+#[test]
+fn test_snowflake_create_view_with_composite_policy_name() {
+    let create_view_with_tag =
+        r#"CREATE VIEW X (COL WITH MASKING POLICY foo.bar.baz) AS SELECT * FROM Y"#;
+    snowflake().verified_stmt(create_view_with_tag);
+}
@@ -324,7 +324,7 @@ fn parse_create_table_on_conflict_col() {
         Keyword::IGNORE,
         Keyword::REPLACE,
     ] {
-        let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {:?})", keyword);
+        let sql = format!("CREATE TABLE t1 (a INT, b INT ON CONFLICT {keyword:?})");
         match sqlite_and_generic().verified_stmt(&sql) {
             Statement::CreateTable(CreateTable { columns, .. }) => {
                 assert_eq!(
@@ -410,7 +410,7 @@ fn parse_window_function_with_filter() {
         "count",
         "user_defined_function",
     ] {
-        let sql = format!("SELECT {}(x) FILTER (WHERE y) OVER () FROM t", func_name);
+        let sql = format!("SELECT {func_name}(x) FILTER (WHERE y) OVER () FROM t");
         let select = sqlite().verified_only_select(&sql);
         assert_eq!(select.to_string(), sql);
         assert_eq!(
@@ -444,7 +444,7 @@ fn parse_window_function_with_filter() {
 fn parse_attach_database() {
     let sql = "ATTACH DATABASE 'test.db' AS test";
     let verified_stmt = sqlite().verified_stmt(sql);
-    assert_eq!(sql, format!("{}", verified_stmt));
+    assert_eq!(sql, format!("{verified_stmt}"));
     match verified_stmt {
         Statement::AttachDatabase {
             schema_name,