Mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-07-07 17:04:59 +00:00)
Implement SnowFlake ALTER SESSION (#1712)

parent 28736da235
commit 3ace97c0ef

8 changed files with 335 additions and 197 deletions
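
Note (not part of the diff): a minimal sketch of how the new ALTER SESSION support can be exercised through the parser's public API, assuming the crate's usual `Parser::parse_sql` entry point and the `SnowflakeDialect`; the statements mirror the ones verified in the tests below.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    // SET takes space-separated KEY=VALUE session parameters.
    let sql = "ALTER SESSION SET AUTOCOMMIT=FALSE QUERY_TAG='tag'";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    // The statement round-trips through Display (see test_alter_session below).
    assert_eq!(stmts[0].to_string(), sql);

    // UNSET takes a comma-separated list of parameter names.
    let sql = "ALTER SESSION UNSET AUTOCOMMIT, QUERY_TAG";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    assert_eq!(stmts[0].to_string(), sql);
}
```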
src/ast/helpers/key_value_options.rs (Normal file, +89)
@@ -0,0 +1,89 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

//! Key-value options for SQL statements.
//! See [this page](https://docs.snowflake.com/en/sql-reference/commands-data-loading) for more details.

#[cfg(not(feature = "std"))]
use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::fmt;
use core::fmt::Formatter;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

#[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct KeyValueOptions {
    pub options: Vec<KeyValueOption>,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum KeyValueOptionType {
    STRING,
    BOOLEAN,
    ENUM,
    NUMBER,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct KeyValueOption {
    pub option_name: String,
    pub option_type: KeyValueOptionType,
    pub value: String,
}

impl fmt::Display for KeyValueOptions {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        if !self.options.is_empty() {
            let mut first = false;
            for option in &self.options {
                if !first {
                    first = true;
                } else {
                    f.write_str(" ")?;
                }
                write!(f, "{}", option)?;
            }
        }
        Ok(())
    }
}

impl fmt::Display for KeyValueOption {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.option_type {
            KeyValueOptionType::STRING => {
                write!(f, "{}='{}'", self.option_name, self.value)?;
            }
            KeyValueOptionType::ENUM | KeyValueOptionType::BOOLEAN | KeyValueOptionType::NUMBER => {
                write!(f, "{}={}", self.option_name, self.value)?;
            }
        }
        Ok(())
    }
}
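
Note (not part of the diff): a quick sketch of how the Display impls above serialize an option list; the option names and values here are illustrative only. STRING values are single-quoted, while BOOLEAN, ENUM, and NUMBER values are emitted verbatim, with options separated by single spaces.

```rust
use sqlparser::ast::helpers::key_value_options::{
    KeyValueOption, KeyValueOptionType, KeyValueOptions,
};

fn main() {
    let options = KeyValueOptions {
        options: vec![
            KeyValueOption {
                option_name: "AUTOCOMMIT".to_string(),
                option_type: KeyValueOptionType::BOOLEAN,
                value: "FALSE".to_string(),
            },
            KeyValueOption {
                option_name: "QUERY_TAG".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "tag".to_string(),
            },
        ],
    };
    // Rendered exactly as ALTER SESSION SET would print its parameters.
    assert_eq!(options.to_string(), "AUTOCOMMIT=FALSE QUERY_TAG='tag'");
}
```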

@@ -15,5 +15,6 @@
// specific language governing permissions and limitations
// under the License.
pub mod attached_token;
pub mod key_value_options;
pub mod stmt_create_table;
pub mod stmt_data_loading;

@@ -24,11 +24,11 @@ use alloc::string::String;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::fmt;
use core::fmt::Formatter;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

use crate::ast::helpers::key_value_options::KeyValueOptions;
use crate::ast::{Ident, ObjectName};
#[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut};
@@ -38,36 +38,10 @@ use sqlparser_derive::{Visit, VisitMut};
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct StageParamsObject {
    pub url: Option<String>,
    pub encryption: DataLoadingOptions,
    pub encryption: KeyValueOptions,
    pub endpoint: Option<String>,
    pub storage_integration: Option<String>,
    pub credentials: DataLoadingOptions,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct DataLoadingOptions {
    pub options: Vec<DataLoadingOption>,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum DataLoadingOptionType {
    STRING,
    BOOLEAN,
    ENUM,
    NUMBER,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct DataLoadingOption {
    pub option_name: String,
    pub option_type: DataLoadingOptionType,
    pub value: String,
    pub credentials: KeyValueOptions,
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -106,39 +80,6 @@ impl fmt::Display for StageParamsObject {
    }
}

impl fmt::Display for DataLoadingOptions {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        if !self.options.is_empty() {
            let mut first = false;
            for option in &self.options {
                if !first {
                    first = true;
                } else {
                    f.write_str(" ")?;
                }
                write!(f, "{}", option)?;
            }
        }
        Ok(())
    }
}

impl fmt::Display for DataLoadingOption {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.option_type {
            DataLoadingOptionType::STRING => {
                write!(f, "{}='{}'", self.option_name, self.value)?;
            }
            DataLoadingOptionType::ENUM
            | DataLoadingOptionType::BOOLEAN
            | DataLoadingOptionType::NUMBER => {
                write!(f, "{}={}", self.option_name, self.value)?;
            }
        }
        Ok(())
    }
}

impl fmt::Display for StageLoadSelectItem {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.alias.is_some() {

@@ -89,9 +89,8 @@ pub use self::value::{
    NormalizationForm, TrimWhereField, Value,
};

use crate::ast::helpers::stmt_data_loading::{
    DataLoadingOptions, StageLoadSelectItem, StageParamsObject,
};
use crate::ast::helpers::key_value_options::KeyValueOptions;
use crate::ast::helpers::stmt_data_loading::{StageLoadSelectItem, StageParamsObject};
#[cfg(feature = "visitor")]
pub use visitor::*;

@@ -2504,8 +2503,8 @@ pub enum Statement {
        from_query: Option<Box<Query>>,
        files: Option<Vec<String>>,
        pattern: Option<String>,
        file_format: DataLoadingOptions,
        copy_options: DataLoadingOptions,
        file_format: KeyValueOptions,
        copy_options: KeyValueOptions,
        validation_mode: Option<String>,
        partition: Option<Box<Expr>>,
    },
@@ -2713,6 +2712,17 @@ pub enum Statement {
        owner: Option<ddl::AlterConnectorOwner>,
    },
    /// ```sql
    /// ALTER SESSION SET sessionParam
    /// ALTER SESSION UNSET <param_name> [ , <param_name> , ... ]
    /// ```
    /// See <https://docs.snowflake.com/en/sql-reference/sql/alter-session>
    AlterSession {
        /// true is to set for the session parameters, false is to unset
        set: bool,
        /// The session parameters to set or unset
        session_params: KeyValueOptions,
    },
    /// ```sql
    /// ATTACH DATABASE 'path/to/file' AS alias
    /// ```
    /// (SQLite-specific)
@@ -3240,9 +3250,9 @@ pub enum Statement {
        if_not_exists: bool,
        name: ObjectName,
        stage_params: StageParamsObject,
        directory_table_params: DataLoadingOptions,
        file_format: DataLoadingOptions,
        copy_options: DataLoadingOptions,
        directory_table_params: KeyValueOptions,
        file_format: KeyValueOptions,
        copy_options: KeyValueOptions,
        comment: Option<String>,
    },
    /// ```sql
@@ -4467,6 +4477,29 @@ impl fmt::Display for Statement {
                }
                Ok(())
            }
            Statement::AlterSession {
                set,
                session_params,
            } => {
                write!(
                    f,
                    "ALTER SESSION {set}",
                    set = if *set { "SET" } else { "UNSET" }
                )?;
                if !session_params.options.is_empty() {
                    if *set {
                        write!(f, " {}", session_params)?;
                    } else {
                        let options = session_params
                            .options
                            .iter()
                            .map(|p| p.option_name.clone())
                            .collect::<Vec<_>>();
                        write!(f, " {}", display_separated(&options, ", "))?;
                    }
                }
                Ok(())
            }
            Statement::Drop {
                object_type,
                if_exists,

@@ -430,6 +430,7 @@ impl Spanned for Statement {
            // These statements need to be implemented
            Statement::AlterType { .. } => Span::empty(),
            Statement::AlterRole { .. } => Span::empty(),
            Statement::AlterSession { .. } => Span::empty(),
            Statement::AttachDatabase { .. } => Span::empty(),
            Statement::AttachDuckDBDatabase { .. } => Span::empty(),
            Statement::DetachDuckDBDatabase { .. } => Span::empty(),

@@ -17,10 +17,10 @@

#[cfg(not(feature = "std"))]
use crate::alloc::string::ToString;
use crate::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType, KeyValueOptions};
use crate::ast::helpers::stmt_create_table::CreateTableBuilder;
use crate::ast::helpers::stmt_data_loading::{
    DataLoadingOption, DataLoadingOptionType, DataLoadingOptions, FileStagingCommand,
    StageLoadSelectItem, StageParamsObject,
    FileStagingCommand, StageLoadSelectItem, StageParamsObject,
};
use crate::ast::{
    ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident,
@@ -130,6 +130,16 @@ impl Dialect for SnowflakeDialect {
    }

    fn parse_statement(&self, parser: &mut Parser) -> Option<Result<Statement, ParserError>> {
        if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) {
            // ALTER SESSION
            let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) {
                Some(Keyword::SET) => true,
                Some(Keyword::UNSET) => false,
                _ => return Some(parser.expected("SET or UNSET", parser.peek_token())),
            };
            return Some(parse_alter_session(parser, set));
        }

        if parser.parse_keyword(Keyword::CREATE) {
            // possibly CREATE STAGE
            //[ OR REPLACE ]
@@ -358,6 +368,18 @@ fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result<Statem
    }
}

/// Parse snowflake alter session.
/// <https://docs.snowflake.com/en/sql-reference/sql/alter-session>
fn parse_alter_session(parser: &mut Parser, set: bool) -> Result<Statement, ParserError> {
    let session_options = parse_session_options(parser, set)?;
    Ok(Statement::AlterSession {
        set,
        session_params: KeyValueOptions {
            options: session_options,
        },
    })
}

/// Parse snowflake create table statement.
/// <https://docs.snowflake.com/en/sql-reference/sql/create-table>
/// <https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table>
@@ -634,13 +656,13 @@ pub fn parse_create_stage(
        if_not_exists,
        name,
        stage_params,
        directory_table_params: DataLoadingOptions {
        directory_table_params: KeyValueOptions {
            options: directory_table_params,
        },
        file_format: DataLoadingOptions {
        file_format: KeyValueOptions {
            options: file_format,
        },
        copy_options: DataLoadingOptions {
        copy_options: KeyValueOptions {
            options: copy_options,
        },
        comment,
@@ -708,10 +730,10 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
    let mut from_stage = None;
    let mut stage_params = StageParamsObject {
        url: None,
        encryption: DataLoadingOptions { options: vec![] },
        encryption: KeyValueOptions { options: vec![] },
        endpoint: None,
        storage_integration: None,
        credentials: DataLoadingOptions { options: vec![] },
        credentials: KeyValueOptions { options: vec![] },
    };
    let mut from_query = None;
    let mut partition = None;
@@ -818,7 +840,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
            Token::Comma => continue,
            // In `COPY INTO <location>` the copy options do not have a shared key
            // like in `COPY INTO <table>`
            Token::Word(key) => copy_options.push(parse_copy_option(parser, key)?),
            Token::Word(key) => copy_options.push(parse_option(parser, key)?),
            _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()),
        }
    }
@@ -834,10 +856,10 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
        from_query,
        files: if files.is_empty() { None } else { Some(files) },
        pattern,
        file_format: DataLoadingOptions {
        file_format: KeyValueOptions {
            options: file_format,
        },
        copy_options: DataLoadingOptions {
        copy_options: KeyValueOptions {
            options: copy_options,
        },
        validation_mode,
@@ -931,8 +953,8 @@ fn parse_select_items_for_data_load(

fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserError> {
    let (mut url, mut storage_integration, mut endpoint) = (None, None, None);
    let mut encryption: DataLoadingOptions = DataLoadingOptions { options: vec![] };
    let mut credentials: DataLoadingOptions = DataLoadingOptions { options: vec![] };
    let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![] };
    let mut credentials: KeyValueOptions = KeyValueOptions { options: vec![] };

    // URL
    if parser.parse_keyword(Keyword::URL) {
@@ -961,7 +983,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserEr
    // CREDENTIALS
    if parser.parse_keyword(Keyword::CREDENTIALS) {
        parser.expect_token(&Token::Eq)?;
        credentials = DataLoadingOptions {
        credentials = KeyValueOptions {
            options: parse_parentheses_options(parser)?,
        };
    }
@@ -969,7 +991,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserEr
    // ENCRYPTION
    if parser.parse_keyword(Keyword::ENCRYPTION) {
        parser.expect_token(&Token::Eq)?;
        encryption = DataLoadingOptions {
        encryption = KeyValueOptions {
            options: parse_parentheses_options(parser)?,
        };
    }
@@ -983,20 +1005,63 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserEr
    })
}

/// Parses options separated by blank spaces, commas, or new lines like:
/// ABORT_DETACHED_QUERY = { TRUE | FALSE }
/// [ ACTIVE_PYTHON_PROFILER = { 'LINE' | 'MEMORY' } ]
/// [ BINARY_INPUT_FORMAT = '\<string\>' ]
fn parse_session_options(
    parser: &mut Parser,
    set: bool,
) -> Result<Vec<KeyValueOption>, ParserError> {
    let mut options: Vec<KeyValueOption> = Vec::new();
    let empty = String::new;
    loop {
        match parser.next_token().token {
            Token::Comma => continue,
            Token::Word(key) => {
                if set {
                    let option = parse_option(parser, key)?;
                    options.push(option);
                } else {
                    options.push(KeyValueOption {
                        option_name: key.value,
                        option_type: KeyValueOptionType::STRING,
                        value: empty(),
                    });
                }
            }
            _ => {
                if parser.peek_token().token == Token::EOF {
                    break;
                }
                return parser.expected("another option", parser.peek_token());
            }
        }
    }
    options
        .is_empty()
        .then(|| {
            Err(ParserError::ParserError(
                "expected at least one option".to_string(),
            ))
        })
        .unwrap_or(Ok(options))
}

/// Parses options provided within parentheses like:
/// ( ENABLE = { TRUE | FALSE }
/// [ AUTO_REFRESH = { TRUE | FALSE } ]
/// [ REFRESH_ON_CREATE = { TRUE | FALSE } ]
/// [ NOTIFICATION_INTEGRATION = '<notification_integration_name>' ] )
///
fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<DataLoadingOption>, ParserError> {
    let mut options: Vec<DataLoadingOption> = Vec::new();
fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<KeyValueOption>, ParserError> {
    let mut options: Vec<KeyValueOption> = Vec::new();
    parser.expect_token(&Token::LParen)?;
    loop {
        match parser.next_token().token {
            Token::RParen => break,
            Token::Comma => continue,
            Token::Word(key) => options.push(parse_copy_option(parser, key)?),
            Token::Word(key) => options.push(parse_option(parser, key)?),
            _ => return parser.expected("another option or ')'", parser.peek_token()),
        };
    }
@@ -1004,35 +1069,35 @@ fn parse_parentheses_options(parser: &mut Parser) -> Result<Vec<DataLoadingOptio
}

/// Parses a `KEY = VALUE` construct based on the specified key
fn parse_copy_option(parser: &mut Parser, key: Word) -> Result<DataLoadingOption, ParserError> {
fn parse_option(parser: &mut Parser, key: Word) -> Result<KeyValueOption, ParserError> {
    parser.expect_token(&Token::Eq)?;
    if parser.parse_keyword(Keyword::TRUE) {
        Ok(DataLoadingOption {
        Ok(KeyValueOption {
            option_name: key.value,
            option_type: DataLoadingOptionType::BOOLEAN,
            option_type: KeyValueOptionType::BOOLEAN,
            value: "TRUE".to_string(),
        })
    } else if parser.parse_keyword(Keyword::FALSE) {
        Ok(DataLoadingOption {
        Ok(KeyValueOption {
            option_name: key.value,
            option_type: DataLoadingOptionType::BOOLEAN,
            option_type: KeyValueOptionType::BOOLEAN,
            value: "FALSE".to_string(),
        })
    } else {
        match parser.next_token().token {
            Token::SingleQuotedString(value) => Ok(DataLoadingOption {
            Token::SingleQuotedString(value) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: DataLoadingOptionType::STRING,
                option_type: KeyValueOptionType::STRING,
                value,
            }),
            Token::Word(word) => Ok(DataLoadingOption {
            Token::Word(word) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: word.value,
            }),
            Token::Number(n, _) => Ok(DataLoadingOption {
            Token::Number(n, _) => Ok(KeyValueOption {
                option_name: key.value,
                option_type: DataLoadingOptionType::NUMBER,
                option_type: KeyValueOptionType::NUMBER,
                value: n,
            }),
            _ => parser.expected("expected option value", parser.peek_token()),

@@ -913,6 +913,7 @@ define_keywords!(
    UNNEST,
    UNPIVOT,
    UNSAFE,
    UNSET,
    UNSIGNED,
    UNTIL,
    UPDATE,

@@ -19,9 +19,8 @@
//! Test SQL syntax specific to Snowflake. The parser based on the
//! generic dialect is also tested (on the inputs it can handle).

use sqlparser::ast::helpers::stmt_data_loading::{
    DataLoadingOption, DataLoadingOptionType, StageLoadSelectItem,
};
use sqlparser::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType};
use sqlparser::ast::helpers::stmt_data_loading::StageLoadSelectItem;
use sqlparser::ast::*;
use sqlparser::dialect::{Dialect, GenericDialect, SnowflakeDialect};
use sqlparser::parser::{ParserError, ParserOptions};
@@ -1914,38 +1913,26 @@ fn test_create_stage_with_stage_params() {
                "<s3_api_compatible_endpoint>",
                stage_params.endpoint.unwrap()
            );
            assert!(stage_params
                .credentials
                .options
                .contains(&DataLoadingOption {
                    option_name: "AWS_KEY_ID".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "1a2b3c".to_string()
                }));
            assert!(stage_params
                .credentials
                .options
                .contains(&DataLoadingOption {
                    option_name: "AWS_SECRET_KEY".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "4x5y6z".to_string()
                }));
            assert!(stage_params
                .encryption
                .options
                .contains(&DataLoadingOption {
                    option_name: "MASTER_KEY".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "key".to_string()
                }));
            assert!(stage_params
                .encryption
                .options
                .contains(&DataLoadingOption {
                    option_name: "TYPE".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "AWS_SSE_KMS".to_string()
                }));
            assert!(stage_params.credentials.options.contains(&KeyValueOption {
                option_name: "AWS_KEY_ID".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "1a2b3c".to_string()
            }));
            assert!(stage_params.credentials.options.contains(&KeyValueOption {
                option_name: "AWS_SECRET_KEY".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "4x5y6z".to_string()
            }));
            assert!(stage_params.encryption.options.contains(&KeyValueOption {
                option_name: "MASTER_KEY".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "key".to_string()
            }));
            assert!(stage_params.encryption.options.contains(&KeyValueOption {
                option_name: "TYPE".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "AWS_SSE_KMS".to_string()
            }));
        }
        _ => unreachable!(),
    };
@@ -1966,19 +1953,19 @@ fn test_create_stage_with_directory_table_params() {
            directory_table_params,
            ..
        } => {
            assert!(directory_table_params.options.contains(&DataLoadingOption {
            assert!(directory_table_params.options.contains(&KeyValueOption {
                option_name: "ENABLE".to_string(),
                option_type: DataLoadingOptionType::BOOLEAN,
                option_type: KeyValueOptionType::BOOLEAN,
                value: "TRUE".to_string()
            }));
            assert!(directory_table_params.options.contains(&DataLoadingOption {
            assert!(directory_table_params.options.contains(&KeyValueOption {
                option_name: "REFRESH_ON_CREATE".to_string(),
                option_type: DataLoadingOptionType::BOOLEAN,
                option_type: KeyValueOptionType::BOOLEAN,
                value: "FALSE".to_string()
            }));
            assert!(directory_table_params.options.contains(&DataLoadingOption {
            assert!(directory_table_params.options.contains(&KeyValueOption {
                option_name: "NOTIFICATION_INTEGRATION".to_string(),
                option_type: DataLoadingOptionType::STRING,
                option_type: KeyValueOptionType::STRING,
                value: "some-string".to_string()
            }));
        }
@@ -1997,19 +1984,19 @@ fn test_create_stage_with_file_format() {

    match snowflake_without_unescape().verified_stmt(sql) {
        Statement::CreateStage { file_format, .. } => {
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "COMPRESSION".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "AUTO".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "BINARY_FORMAT".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "HEX".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "ESCAPE".to_string(),
                option_type: DataLoadingOptionType::STRING,
                option_type: KeyValueOptionType::STRING,
                value: r#"\\"#.to_string()
            }));
        }
@@ -2030,14 +2017,14 @@ fn test_create_stage_with_copy_options() {
    );
    match snowflake().verified_stmt(sql) {
        Statement::CreateStage { copy_options, .. } => {
            assert!(copy_options.options.contains(&DataLoadingOption {
            assert!(copy_options.options.contains(&KeyValueOption {
                option_name: "ON_ERROR".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "CONTINUE".to_string()
            }));
            assert!(copy_options.options.contains(&DataLoadingOption {
            assert!(copy_options.options.contains(&KeyValueOption {
                option_name: "FORCE".to_string(),
                option_type: DataLoadingOptionType::BOOLEAN,
                option_type: KeyValueOptionType::BOOLEAN,
                value: "TRUE".to_string()
            }));
        }
@@ -2167,38 +2154,26 @@ fn test_copy_into_with_stage_params() {
                "<s3_api_compatible_endpoint>",
                stage_params.endpoint.unwrap()
            );
            assert!(stage_params
                .credentials
                .options
                .contains(&DataLoadingOption {
                    option_name: "AWS_KEY_ID".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "1a2b3c".to_string()
                }));
            assert!(stage_params
                .credentials
                .options
                .contains(&DataLoadingOption {
                    option_name: "AWS_SECRET_KEY".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "4x5y6z".to_string()
                }));
            assert!(stage_params
                .encryption
                .options
                .contains(&DataLoadingOption {
                    option_name: "MASTER_KEY".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "key".to_string()
                }));
            assert!(stage_params
                .encryption
                .options
                .contains(&DataLoadingOption {
                    option_name: "TYPE".to_string(),
                    option_type: DataLoadingOptionType::STRING,
                    value: "AWS_SSE_KMS".to_string()
                }));
            assert!(stage_params.credentials.options.contains(&KeyValueOption {
                option_name: "AWS_KEY_ID".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "1a2b3c".to_string()
            }));
            assert!(stage_params.credentials.options.contains(&KeyValueOption {
                option_name: "AWS_SECRET_KEY".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "4x5y6z".to_string()
            }));
            assert!(stage_params.encryption.options.contains(&KeyValueOption {
                option_name: "MASTER_KEY".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "key".to_string()
            }));
            assert!(stage_params.encryption.options.contains(&KeyValueOption {
                option_name: "TYPE".to_string(),
                option_type: KeyValueOptionType::STRING,
                value: "AWS_SSE_KMS".to_string()
            }));
        }
        _ => unreachable!(),
    };
@@ -2326,19 +2301,19 @@ fn test_copy_into_file_format() {

    match snowflake_without_unescape().verified_stmt(sql) {
        Statement::CopyIntoSnowflake { file_format, .. } => {
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "COMPRESSION".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "AUTO".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "BINARY_FORMAT".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "HEX".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "ESCAPE".to_string(),
                option_type: DataLoadingOptionType::STRING,
                option_type: KeyValueOptionType::STRING,
                value: r#"\\"#.to_string()
            }));
        }
@@ -2365,19 +2340,19 @@ fn test_copy_into_file_format() {
        .unwrap()
    {
        Statement::CopyIntoSnowflake { file_format, .. } => {
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "COMPRESSION".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "AUTO".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "BINARY_FORMAT".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "HEX".to_string()
            }));
            assert!(file_format.options.contains(&DataLoadingOption {
            assert!(file_format.options.contains(&KeyValueOption {
                option_name: "ESCAPE".to_string(),
                option_type: DataLoadingOptionType::STRING,
                option_type: KeyValueOptionType::STRING,
                value: r#"\\"#.to_string()
            }));
        }
@@ -2397,14 +2372,14 @@ fn test_copy_into_copy_options() {

    match snowflake().verified_stmt(sql) {
        Statement::CopyIntoSnowflake { copy_options, .. } => {
            assert!(copy_options.options.contains(&DataLoadingOption {
            assert!(copy_options.options.contains(&KeyValueOption {
                option_name: "ON_ERROR".to_string(),
                option_type: DataLoadingOptionType::ENUM,
                option_type: KeyValueOptionType::ENUM,
                value: "CONTINUE".to_string()
            }));
            assert!(copy_options.options.contains(&DataLoadingOption {
            assert!(copy_options.options.contains(&KeyValueOption {
                option_name: "FORCE".to_string(),
                option_type: DataLoadingOptionType::BOOLEAN,
                option_type: KeyValueOptionType::BOOLEAN,
                value: "TRUE".to_string()
            }));
        }
@@ -3475,3 +3450,35 @@ fn test_grant_database_role_to() {
    snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE r1 TO ROLE r2");
    snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE db1.sc1.r1 TO ROLE db1.sc1.r2");
}

#[test]
fn test_alter_session() {
    assert_eq!(
        snowflake()
            .parse_sql_statements("ALTER SESSION SET")
            .unwrap_err()
            .to_string(),
        "sql parser error: expected at least one option"
    );
    assert_eq!(
        snowflake()
            .parse_sql_statements("ALTER SESSION UNSET")
            .unwrap_err()
            .to_string(),
        "sql parser error: expected at least one option"
    );

    snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=TRUE");
    snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=FALSE QUERY_TAG='tag'");
    snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT");
    snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT, QUERY_TAG");
    snowflake().one_statement_parses_to(
        "ALTER SESSION SET A=false, B='tag';",
        "ALTER SESSION SET A=FALSE B='tag'",
    );
    snowflake().one_statement_parses_to(
        "ALTER SESSION SET A=true \nB='tag'",
        "ALTER SESSION SET A=TRUE B='tag'",
    );
    snowflake().one_statement_parses_to("ALTER SESSION UNSET a\nB", "ALTER SESSION UNSET a, B");
}