Mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-10-09 21:42:05 +00:00)
Support more DateTimeField variants (#1191)
parent 8dd213cff2
commit 127be97369

6 changed files with 129 additions and 46 deletions
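Before the per-file hunks, here is a minimal, self-contained sketch (not part of the patch; it assumes the sqlparser crate as of this commit) of the behaviour the change introduces, based on the tests added below: BigQuery's `WEEK(<WEEKDAY>)` and Snowflake's custom date/time parts now parse and round-trip through `Display`.

```rust
// Sketch only: demonstrates the new DateTimeField variants end to end.
use sqlparser::dialect::{BigQueryDialect, SnowflakeDialect};
use sqlparser::parser::Parser;

fn main() {
    // WEEK with an explicit weekday (BigQuery and Generic dialects).
    let sql = "SELECT EXTRACT(WEEK(MONDAY) FROM d)";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(stmts[0].to_string(), sql);

    // An arbitrary abbreviation parsed as a custom part (Snowflake and Generic dialects).
    let sql = "SELECT EXTRACT(eod FROM d)";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    assert_eq!(stmts[0].to_string(), sql);
}
```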
@@ -229,7 +229,7 @@ impl fmt::Display for Interval {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let value = self.value.as_ref();
         match (
-            self.leading_field,
+            &self.leading_field,
             self.leading_precision,
             self.fractional_seconds_precision,
         ) {
@@ -248,13 +248,13 @@ impl fmt::Display for Interval {
             }
             _ => {
                 write!(f, "INTERVAL {value}")?;
-                if let Some(leading_field) = self.leading_field {
+                if let Some(leading_field) = &self.leading_field {
                     write!(f, " {leading_field}")?;
                 }
                 if let Some(leading_precision) = self.leading_precision {
                     write!(f, " ({leading_precision})")?;
                 }
-                if let Some(last_field) = self.last_field {
+                if let Some(last_field) = &self.last_field {
                     write!(f, " TO {last_field}")?;
                 }
                 if let Some(fractional_seconds_precision) = self.fractional_seconds_precision {
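The Interval hunks above only switch to borrowing: because DateTimeField gains variants that own an Ident (see the value.rs hunks below), the enum no longer derives Copy, so `leading_field` and `last_field` have to be matched by reference. A small illustrative sketch, not taken from the patch:

```rust
// Sketch only: DateTimeField is no longer Copy, so Option<DateTimeField>
// must be borrowed rather than moved when formatting.
use sqlparser::ast::{DateTimeField, Ident};

fn main() {
    let field = Some(DateTimeField::Week(Some(Ident::new("MONDAY"))));
    // `&field` borrows instead of moving the non-Copy value out of the Option.
    if let Some(f) = &field {
        println!("{f}");
    }
    // `field` is still usable here because it was only borrowed.
    assert!(field.is_some());
}
```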
src/ast/value.rs (114 changed lines)
@@ -12,6 +12,13 @@
 #[cfg(not(feature = "std"))]
 use alloc::string::String;
 
+#[cfg(not(feature = "std"))]
+use alloc::format;
+
+#[cfg(not(feature = "std"))]
+use alloc::string::ToString;
+
 use core::fmt;
 
 #[cfg(feature = "bigdecimal")]
@@ -20,6 +27,7 @@ use bigdecimal::BigDecimal;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
 
+use crate::ast::Ident;
 #[cfg(feature = "visitor")]
 use sqlparser_derive::{Visit, VisitMut};
@@ -109,17 +117,25 @@ impl fmt::Display for DollarQuotedString {
     }
 }
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub enum DateTimeField {
     Year,
     Month,
-    Week,
+    /// Week optionally followed by a WEEKDAY.
+    ///
+    /// ```sql
+    /// WEEK(MONDAY)
+    /// ```
+    ///
+    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#extract)
+    Week(Option<Ident>),
     Day,
     DayOfWeek,
     DayOfYear,
     Date,
+    Datetime,
     Hour,
     Minute,
     Second,
@@ -148,47 +164,67 @@ pub enum DateTimeField {
     TimezoneMinute,
     TimezoneRegion,
     NoDateTime,
+    /// Arbitrary abbreviation or custom date-time part.
+    ///
+    /// ```sql
+    /// EXTRACT(q FROM CURRENT_TIMESTAMP)
+    /// ```
+    /// [Snowflake](https://docs.snowflake.com/en/sql-reference/functions-date-time#supported-date-and-time-parts)
+    Custom(Ident),
 }
 
 impl fmt::Display for DateTimeField {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(match self {
-            DateTimeField::Year => "YEAR",
-            DateTimeField::Month => "MONTH",
-            DateTimeField::Week => "WEEK",
-            DateTimeField::Day => "DAY",
-            DateTimeField::DayOfWeek => "DAYOFWEEK",
-            DateTimeField::DayOfYear => "DAYOFYEAR",
-            DateTimeField::Date => "DATE",
-            DateTimeField::Hour => "HOUR",
-            DateTimeField::Minute => "MINUTE",
-            DateTimeField::Second => "SECOND",
-            DateTimeField::Century => "CENTURY",
-            DateTimeField::Decade => "DECADE",
-            DateTimeField::Dow => "DOW",
-            DateTimeField::Doy => "DOY",
-            DateTimeField::Epoch => "EPOCH",
-            DateTimeField::Isodow => "ISODOW",
-            DateTimeField::Isoyear => "ISOYEAR",
-            DateTimeField::IsoWeek => "ISOWEEK",
-            DateTimeField::Julian => "JULIAN",
-            DateTimeField::Microsecond => "MICROSECOND",
-            DateTimeField::Microseconds => "MICROSECONDS",
-            DateTimeField::Millenium => "MILLENIUM",
-            DateTimeField::Millennium => "MILLENNIUM",
-            DateTimeField::Millisecond => "MILLISECOND",
-            DateTimeField::Milliseconds => "MILLISECONDS",
-            DateTimeField::Nanosecond => "NANOSECOND",
-            DateTimeField::Nanoseconds => "NANOSECONDS",
-            DateTimeField::Quarter => "QUARTER",
-            DateTimeField::Time => "TIME",
-            DateTimeField::Timezone => "TIMEZONE",
-            DateTimeField::TimezoneAbbr => "TIMEZONE_ABBR",
-            DateTimeField::TimezoneHour => "TIMEZONE_HOUR",
-            DateTimeField::TimezoneMinute => "TIMEZONE_MINUTE",
-            DateTimeField::TimezoneRegion => "TIMEZONE_REGION",
-            DateTimeField::NoDateTime => "NODATETIME",
-        })
+        f.write_str(
+            match self {
+                DateTimeField::Year => "YEAR".to_string(),
+                DateTimeField::Month => "MONTH".to_string(),
+                DateTimeField::Week(week_day) => {
+                    format!(
+                        "WEEK{}",
+                        week_day
+                            .as_ref()
+                            .map(|w| format!("({w})"))
+                            .unwrap_or_default()
+                    )
+                }
+                DateTimeField::Day => "DAY".to_string(),
+                DateTimeField::DayOfWeek => "DAYOFWEEK".to_string(),
+                DateTimeField::DayOfYear => "DAYOFYEAR".to_string(),
+                DateTimeField::Date => "DATE".to_string(),
+                DateTimeField::Datetime => "DATETIME".to_string(),
+                DateTimeField::Hour => "HOUR".to_string(),
+                DateTimeField::Minute => "MINUTE".to_string(),
+                DateTimeField::Second => "SECOND".to_string(),
+                DateTimeField::Century => "CENTURY".to_string(),
+                DateTimeField::Decade => "DECADE".to_string(),
+                DateTimeField::Dow => "DOW".to_string(),
+                DateTimeField::Doy => "DOY".to_string(),
+                DateTimeField::Epoch => "EPOCH".to_string(),
+                DateTimeField::Isodow => "ISODOW".to_string(),
+                DateTimeField::Isoyear => "ISOYEAR".to_string(),
+                DateTimeField::IsoWeek => "ISOWEEK".to_string(),
+                DateTimeField::Julian => "JULIAN".to_string(),
+                DateTimeField::Microsecond => "MICROSECOND".to_string(),
+                DateTimeField::Microseconds => "MICROSECONDS".to_string(),
+                DateTimeField::Millenium => "MILLENIUM".to_string(),
+                DateTimeField::Millennium => "MILLENNIUM".to_string(),
+                DateTimeField::Millisecond => "MILLISECOND".to_string(),
+                DateTimeField::Milliseconds => "MILLISECONDS".to_string(),
+                DateTimeField::Nanosecond => "NANOSECOND".to_string(),
+                DateTimeField::Nanoseconds => "NANOSECONDS".to_string(),
+                DateTimeField::Quarter => "QUARTER".to_string(),
+                DateTimeField::Time => "TIME".to_string(),
+                DateTimeField::Timezone => "TIMEZONE".to_string(),
+                DateTimeField::TimezoneAbbr => "TIMEZONE_ABBR".to_string(),
+                DateTimeField::TimezoneHour => "TIMEZONE_HOUR".to_string(),
+                DateTimeField::TimezoneMinute => "TIMEZONE_MINUTE".to_string(),
+                DateTimeField::TimezoneRegion => "TIMEZONE_REGION".to_string(),
+                DateTimeField::NoDateTime => "NODATETIME".to_string(),
+                DateTimeField::Custom(custom) => format!("{custom}"),
+            }
+            .as_str(),
+        )
     }
 }
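A short sketch (assuming the crate's public re-exports in sqlparser::ast) of how the reworked Display above renders the new variants:

```rust
use sqlparser::ast::{DateTimeField, Ident};

fn main() {
    // WEEK without a weekday keeps its old rendering...
    assert_eq!(DateTimeField::Week(None).to_string(), "WEEK");
    // ...while a weekday argument is wrapped in parentheses.
    assert_eq!(
        DateTimeField::Week(Some(Ident::new("MONDAY"))).to_string(),
        "WEEK(MONDAY)"
    );
    // Custom parts print the identifier verbatim.
    assert_eq!(DateTimeField::Custom(Ident::new("eod")).to_string(), "eod");
}
```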
@@ -1857,11 +1857,23 @@ impl<'a> Parser<'a> {
             Token::Word(w) => match w.keyword {
                 Keyword::YEAR => Ok(DateTimeField::Year),
                 Keyword::MONTH => Ok(DateTimeField::Month),
-                Keyword::WEEK => Ok(DateTimeField::Week),
+                Keyword::WEEK => {
+                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
+                        && self.consume_token(&Token::LParen)
+                    {
+                        let week_day = self.parse_identifier(false)?;
+                        self.expect_token(&Token::RParen)?;
+                        Some(week_day)
+                    } else {
+                        None
+                    };
+                    Ok(DateTimeField::Week(week_day))
+                }
                 Keyword::DAY => Ok(DateTimeField::Day),
                 Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
                 Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
                 Keyword::DATE => Ok(DateTimeField::Date),
+                Keyword::DATETIME => Ok(DateTimeField::Datetime),
                 Keyword::HOUR => Ok(DateTimeField::Hour),
                 Keyword::MINUTE => Ok(DateTimeField::Minute),
                 Keyword::SECOND => Ok(DateTimeField::Second),
@@ -1889,6 +1901,11 @@ impl<'a> Parser<'a> {
                 Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
                 Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
                 Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
+                _ if dialect_of!(self is SnowflakeDialect | GenericDialect) => {
+                    self.prev_token();
+                    let custom = self.parse_identifier(false)?;
+                    Ok(DateTimeField::Custom(custom))
+                }
                 _ => self.expected("date/time field", next_token),
             },
             _ => self.expected("date/time field", next_token),
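The parser changes above are dialect-gated, which is what the test hunks below exercise. A hedged sketch of the resulting behaviour; PostgreSqlDialect is used here purely as an example of a dialect outside the Snowflake/Generic allowance and is not part of the patch:

```rust
use sqlparser::dialect::{PostgreSqlDialect, SnowflakeDialect};
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT EXTRACT(JIFFY FROM d)";

    // Snowflake (and Generic) fall back to DateTimeField::Custom for unknown parts.
    assert!(Parser::parse_sql(&SnowflakeDialect {}, sql).is_ok());

    // Other dialects still reject an unrecognised date/time field.
    assert!(Parser::parse_sql(&PostgreSqlDialect {}, sql).is_err());
}
```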
@@ -1462,6 +1462,19 @@ fn test_bigquery_trim() {
     );
 }
 
+#[test]
+fn parse_extract_weekday() {
+    let sql = "SELECT EXTRACT(WEEK(MONDAY) FROM d)";
+    let select = bigquery_and_generic().verified_only_select(sql);
+    assert_eq!(
+        &Expr::Extract {
+            field: DateTimeField::Week(Some(Ident::new("MONDAY"))),
+            expr: Box::new(Expr::Identifier(Ident::new("d"))),
+        },
+        expr_from_projection(only(&select.projection)),
+    );
+}
+
 #[test]
 fn test_select_as_struct() {
     bigquery().verified_only_select("SELECT * FROM (SELECT AS VALUE STRUCT(123 AS a, false AS b))");
@@ -2271,6 +2271,7 @@ fn parse_extract() {
     verified_stmt("SELECT EXTRACT(DAYOFWEEK FROM d)");
     verified_stmt("SELECT EXTRACT(DAYOFYEAR FROM d)");
     verified_stmt("SELECT EXTRACT(DATE FROM d)");
+    verified_stmt("SELECT EXTRACT(DATETIME FROM d)");
     verified_stmt("SELECT EXTRACT(HOUR FROM d)");
     verified_stmt("SELECT EXTRACT(MINUTE FROM d)");
     verified_stmt("SELECT EXTRACT(SECOND FROM d)");
@@ -2300,7 +2301,8 @@ fn parse_extract() {
     verified_stmt("SELECT EXTRACT(TIMEZONE_REGION FROM d)");
     verified_stmt("SELECT EXTRACT(TIME FROM d)");
 
-    let res = parse_sql_statements("SELECT EXTRACT(JIFFY FROM d)");
+    let dialects = all_dialects_except(|d| d.is::<SnowflakeDialect>() || d.is::<GenericDialect>());
+    let res = dialects.parse_sql_statements("SELECT EXTRACT(JIFFY FROM d)");
     assert_eq!(
         ParserError::ParserError("Expected date/time field, found: JIFFY".to_string()),
         res.unwrap_err()
@@ -2338,7 +2340,8 @@ fn parse_ceil_datetime() {
     verified_stmt("SELECT CEIL(d TO SECOND) FROM df");
     verified_stmt("SELECT CEIL(d TO MILLISECOND) FROM df");
 
-    let res = parse_sql_statements("SELECT CEIL(d TO JIFFY) FROM df");
+    let dialects = all_dialects_except(|d| d.is::<SnowflakeDialect>() || d.is::<GenericDialect>());
+    let res = dialects.parse_sql_statements("SELECT CEIL(d TO JIFFY) FROM df");
     assert_eq!(
         ParserError::ParserError("Expected date/time field, found: JIFFY".to_string()),
         res.unwrap_err()
@@ -2364,7 +2367,8 @@ fn parse_floor_datetime() {
     verified_stmt("SELECT FLOOR(d TO SECOND) FROM df");
     verified_stmt("SELECT FLOOR(d TO MILLISECOND) FROM df");
 
-    let res = parse_sql_statements("SELECT FLOOR(d TO JIFFY) FROM df");
+    let dialects = all_dialects_except(|d| d.is::<SnowflakeDialect>() || d.is::<GenericDialect>());
+    let res = dialects.parse_sql_statements("SELECT FLOOR(d TO JIFFY) FROM df");
     assert_eq!(
         ParserError::ParserError("Expected date/time field, found: JIFFY".to_string()),
         res.unwrap_err()
@@ -1472,6 +1472,19 @@ fn parse_top() {
     );
 }
 
+#[test]
+fn parse_extract_custom_part() {
+    let sql = "SELECT EXTRACT(eod FROM d)";
+    let select = snowflake_and_generic().verified_only_select(sql);
+    assert_eq!(
+        &Expr::Extract {
+            field: DateTimeField::Custom(Ident::new("eod")),
+            expr: Box::new(Expr::Identifier(Ident::new("d"))),
+        },
+        expr_from_projection(only(&select.projection)),
+    );
+}
+
 #[test]
 fn parse_comma_outer_join() {
     // compound identifiers