Implement Spanned to retrieve source locations on AST nodes (#1435)

Co-authored-by: Ifeanyi Ubah <ify1992@yahoo.com>
Co-authored-by: Andrew Lamb <andrew@nerdnetworks.org>
Authored by Mark-Oliver Junge on 2024-11-26 17:22:30 +01:00, committed by GitHub
parent 0adec33b94
commit 3c8fd74804
18 changed files with 3092 additions and 399 deletions


@ -100,6 +100,23 @@ similar semantics are represented with the same AST. We welcome PRs to fix such
issues and distinguish different syntaxes in the AST.

## WIP: Extracting source locations from AST nodes

This crate allows recovering source locations from AST nodes via the [Spanned](https://docs.rs/sqlparser/latest/sqlparser/ast/trait.Spanned.html) trait, which can be used for advanced diagnostics tooling. Note that this feature is a work in progress and many nodes currently report missing or inaccurate spans. Please see [this document](./docs/source_spans.md#source-span-contributing-guidelines) for information on how to contribute improvements.
```rust
use sqlparser::ast::Spanned;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
use sqlparser::tokenizer::{Location, Span};

// Parse SQL
let ast = Parser::parse_sql(&GenericDialect, "SELECT A FROM B").unwrap();

// The source span can be retrieved with start and end locations
assert_eq!(ast[0].span(), Span {
    start: Location::of(1, 1),
    end: Location::of(1, 16),
});
```
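
Spans are implemented on nested AST nodes as well, so the same call can locate individual parts of a statement. A minimal sketch, assuming the usual `Statement`/`Query`/`Select` shapes of this crate's AST and that `Expr` implements `Spanned`:

```rust
use sqlparser::ast::{SelectItem, SetExpr, Spanned, Statement};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

let ast = Parser::parse_sql(&GenericDialect, "SELECT A FROM B").unwrap();

// Walk down to the projection expression `A` and read its span.
if let Statement::Query(query) = &ast[0] {
    if let SetExpr::Select(select) = query.body.as_ref() {
        if let SelectItem::UnnamedExpr(expr) = &select.projection[0] {
            println!("span of the first projection: {:?}", expr.span());
        }
    }
}
```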

## SQL compliance

SQL was first standardized in 1987, and revisions of the standard have been

docs/source_spans.md (new file, +52 lines)

@ -0,0 +1,52 @@
## Breaking Changes

These are the current breaking changes introduced by the source spans feature:

#### Added fields for spans (must be added to any existing pattern matches)

- `Ident` now stores a `Span`
- `Select`, `With`, `Cte`, and `WildcardAdditionalOptions` now store an `AttachedToken` (a wrapper around `TokenWithLocation`)

#### Misc.

- `TokenWithLocation` now stores a full `Span` rather than just a source location. Code that relied on `token.location` should use `token.span.start` instead (see the sketch below).
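
A small before/after sketch of that `TokenWithLocation` change (illustrative only; the field and type names are the ones introduced in this change):

```rust
use sqlparser::tokenizer::TokenWithLocation;

// Before: `TokenWithLocation` carried a single `location: Location`, so code
// typically read `token.location.line` or `token.location.column`.
//
// After: it carries a full `span: Span`; the old location corresponds to the
// span's start.
fn start_line(token: &TokenWithLocation) -> u64 {
    token.span.start.line
}
```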

## Source Span Contributing Guidelines

When contributing source span improvements, please follow the general [contribution guidelines](../README.md#contributing) and additionally pay attention to the following:

### Source Span Design Considerations

- `Ident` always has a correct source span
- The impact of downstream breaking changes is kept as small as possible
- To this end, prefer recursively merging spans from child nodes over storing spans on every node (see the sketch below)
- Any metadata added to compute spans must not change a node's semantics (`Eq`, `Ord`, `Hash`, etc.)
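
A minimal sketch of the recursive-merging idea for a hypothetical composite node. The `merge` and `qualified_name_span` helpers below are illustrative and not part of the crate; only `Span`, `Location`, and `Ident` are:

```rust
use sqlparser::ast::Ident;
use sqlparser::tokenizer::{Location, Span};

/// Merge two spans by taking the earliest start and the latest end.
/// (line, column) pairs are compared directly so this sketch does not
/// depend on any particular ordering impl for `Location`.
fn merge(a: Span, b: Span) -> Span {
    let key = |l: &Location| (l.line, l.column);
    Span {
        start: if key(&a.start) <= key(&b.start) { a.start } else { b.start },
        end: if key(&a.end) >= key(&b.end) { a.end } else { b.end },
    }
}

/// Hypothetical example: the span of a qualified name such as `schema.table`
/// is computed by merging the spans of its identifier parts instead of being
/// stored on the node itself.
fn qualified_name_span(parts: &[Ident]) -> Span {
    parts
        .iter()
        .map(|ident| ident.span.clone())
        .reduce(merge)
        .unwrap_or_else(Span::empty)
}
```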

The primary reason for missing or inaccurate source spans at this time is that keyword tokens and values in many structures do not yet carry spans, either due to lack of time or because adding them would be a significant breaking change for downstream users.

When considering adding support for source spans on a type, consider the impact on consumers of that type and whether your change would require them to make non-trivial changes to their code.

Example of a trivial change:
```rust
match node {
    ast::Query {
        field1,
        field2,
        location: _, // add a new line to ignore the location
    } => {
        // ... existing handling of the other fields ...
    }
}
```

If adding source spans to a type would require a significant change, such as wrapping that type, please open an issue to discuss it first.

### AST Node Equality and Hashes

When adding tokens to AST nodes, make sure to store them using the [AttachedToken](https://docs.rs/sqlparser/latest/sqlparser/ast/helpers/struct.AttachedToken.html) helper so that semantically equivalent AST nodes always compare as equal and hash to the same value. For example, `select 5` and `SELECT 5` would compare as different `Select` nodes if the `SELECT` token were stored directly:
```rust
struct Select {
    /// Token for the `SELECT` keyword; only used for source spans and
    /// ignored in comparisons and hashing because it is an `AttachedToken`.
    select_token: AttachedToken,
    // ... remaining (semantically meaningful) fields ...
}
```
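
A quick way to see the effect, using only the public parser API (a sketch; the assertion holds because `AttachedToken` ignores the differing keyword tokens):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

// The two statements differ only in keyword casing, so the stored `SELECT`
// tokens differ, but because that token lives behind `AttachedToken` the
// resulting ASTs still compare as equal and hash identically.
let lower = Parser::parse_sql(&GenericDialect, "select 5").unwrap();
let upper = Parser::parse_sql(&GenericDialect, "SELECT 5").unwrap();
assert_eq!(lower, upper);
```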


@ -0,0 +1,82 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use core::fmt::{self, Debug, Formatter};
use core::hash::{Hash, Hasher};
use crate::tokenizer::{Token, TokenWithLocation};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut};
/// A wrapper type for a token attached to an AST node, where the token itself is
/// ignored in comparisons and hashing.
/// This should be used when a token is not relevant for semantics, but is still
/// needed for accurate source location tracking.
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct AttachedToken(pub TokenWithLocation);
impl AttachedToken {
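/// Create a new `AttachedToken` that wraps a placeholder `Token::EOF`, for
/// nodes built programmatically that have no real source token.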
pub fn empty() -> Self {
AttachedToken(TokenWithLocation::wrap(Token::EOF))
}
}
// Conditional Implementations
impl Debug for AttachedToken {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
// Blanket Implementations
impl PartialEq for AttachedToken {
fn eq(&self, _: &Self) -> bool {
true
}
}
impl Eq for AttachedToken {}
impl PartialOrd for AttachedToken {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for AttachedToken {
fn cmp(&self, _: &Self) -> Ordering {
Ordering::Equal
}
}
impl Hash for AttachedToken {
fn hash<H: Hasher>(&self, _state: &mut H) {
// Do nothing
}
}
impl From<TokenWithLocation> for AttachedToken {
fn from(value: TokenWithLocation) -> Self {
AttachedToken(value)
}
}


@ -14,5 +14,6 @@
// KIND, either express or implied. See the License for the // KIND, either express or implied. See the License for the
// specific language governing permissions and limitations // specific language governing permissions and limitations
// under the License. // under the License.
pub mod attached_token;
pub mod stmt_create_table; pub mod stmt_create_table;
pub mod stmt_data_loading; pub mod stmt_data_loading;


@ -23,9 +23,13 @@ use alloc::{
string::{String, ToString}, string::{String, ToString},
vec::Vec, vec::Vec,
}; };
use helpers::attached_token::AttachedToken;
use core::fmt::{self, Display};
use core::ops::Deref; use core::ops::Deref;
use core::{
fmt::{self, Display},
hash,
};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -33,6 +37,8 @@ use serde::{Deserialize, Serialize};
#[cfg(feature = "visitor")] #[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut}; use sqlparser_derive::{Visit, VisitMut};
use crate::tokenizer::Span;
pub use self::data_type::{ pub use self::data_type::{
ArrayElemTypeDef, CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ArrayElemTypeDef, CharLengthUnits, CharacterLength, DataType, ExactNumberInfo,
StructBracketKind, TimezoneInfo, StructBracketKind, TimezoneInfo,
@ -87,6 +93,9 @@ mod dml;
pub mod helpers; pub mod helpers;
mod operator; mod operator;
mod query; mod query;
mod spans;
pub use spans::Spanned;
mod trigger; mod trigger;
mod value; mod value;
@ -131,7 +140,7 @@ where
} }
/// An identifier, decomposed into its value or character data and the quote style. /// An identifier, decomposed into its value or character data and the quote style.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[derive(Debug, Clone, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Ident { pub struct Ident {
@ -140,10 +149,41 @@ pub struct Ident {
/// The starting quote if any. Valid quote characters are the single quote, /// The starting quote if any. Valid quote characters are the single quote,
/// double quote, backtick, and opening square bracket. /// double quote, backtick, and opening square bracket.
pub quote_style: Option<char>, pub quote_style: Option<char>,
/// The span of the identifier in the original SQL string.
pub span: Span,
} }
impl PartialEq for Ident {
fn eq(&self, other: &Self) -> bool {
let Ident {
value,
quote_style,
// exhaustiveness check; we ignore spans in comparisons
span: _,
} = self;
value == &other.value && quote_style == &other.quote_style
}
}
impl core::hash::Hash for Ident {
fn hash<H: hash::Hasher>(&self, state: &mut H) {
let Ident {
value,
quote_style,
// exhaustiveness check; we ignore spans in hashes
span: _,
} = self;
value.hash(state);
quote_style.hash(state);
}
}
impl Eq for Ident {}
impl Ident { impl Ident {
/// Create a new identifier with the given value and no quotes. /// Create a new identifier with the given value and no quotes and an empty span.
pub fn new<S>(value: S) -> Self pub fn new<S>(value: S) -> Self
where where
S: Into<String>, S: Into<String>,
@ -151,6 +191,7 @@ impl Ident {
Ident { Ident {
value: value.into(), value: value.into(),
quote_style: None, quote_style: None,
span: Span::empty(),
} }
} }
@ -164,6 +205,30 @@ impl Ident {
Ident { Ident {
value: value.into(), value: value.into(),
quote_style: Some(quote), quote_style: Some(quote),
span: Span::empty(),
}
}
pub fn with_span<S>(span: Span, value: S) -> Self
where
S: Into<String>,
{
Ident {
value: value.into(),
quote_style: None,
span,
}
}
pub fn with_quote_and_span<S>(quote: char, span: Span, value: S) -> Self
where
S: Into<String>,
{
assert!(quote == '\'' || quote == '"' || quote == '`' || quote == '[');
Ident {
value: value.into(),
quote_style: Some(quote),
span,
} }
} }
} }
@ -173,6 +238,7 @@ impl From<&str> for Ident {
Ident { Ident {
value: value.to_string(), value: value.to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
} }
} }
} }
@ -919,10 +985,10 @@ pub enum Expr {
/// `<search modifier>` /// `<search modifier>`
opt_search_modifier: Option<SearchModifier>, opt_search_modifier: Option<SearchModifier>,
}, },
Wildcard, Wildcard(AttachedToken),
/// Qualified wildcard, e.g. `alias.*` or `schema.table.*`. /// Qualified wildcard, e.g. `alias.*` or `schema.table.*`.
/// (Same caveats apply to `QualifiedWildcard` as to `Wildcard`.) /// (Same caveats apply to `QualifiedWildcard` as to `Wildcard`.)
QualifiedWildcard(ObjectName), QualifiedWildcard(ObjectName, AttachedToken),
/// Some dialects support an older syntax for outer joins where columns are /// Some dialects support an older syntax for outer joins where columns are
/// marked with the `(+)` operator in the WHERE clause, for example: /// marked with the `(+)` operator in the WHERE clause, for example:
/// ///
@ -1211,8 +1277,8 @@ impl fmt::Display for Expr {
Expr::MapAccess { column, keys } => { Expr::MapAccess { column, keys } => {
write!(f, "{column}{}", display_separated(keys, "")) write!(f, "{column}{}", display_separated(keys, ""))
} }
Expr::Wildcard => f.write_str("*"), Expr::Wildcard(_) => f.write_str("*"),
Expr::QualifiedWildcard(prefix) => write!(f, "{}.*", prefix), Expr::QualifiedWildcard(prefix, _) => write!(f, "{}.*", prefix),
Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")), Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")),
Expr::IsTrue(ast) => write!(f, "{ast} IS TRUE"), Expr::IsTrue(ast) => write!(f, "{ast} IS TRUE"),
Expr::IsNotTrue(ast) => write!(f, "{ast} IS NOT TRUE"), Expr::IsNotTrue(ast) => write!(f, "{ast} IS NOT TRUE"),
@ -5432,8 +5498,8 @@ pub enum FunctionArgExpr {
impl From<Expr> for FunctionArgExpr { impl From<Expr> for FunctionArgExpr {
fn from(wildcard_expr: Expr) -> Self { fn from(wildcard_expr: Expr) -> Self {
match wildcard_expr { match wildcard_expr {
Expr::QualifiedWildcard(prefix) => Self::QualifiedWildcard(prefix), Expr::QualifiedWildcard(prefix, _) => Self::QualifiedWildcard(prefix),
Expr::Wildcard => Self::Wildcard, Expr::Wildcard(_) => Self::Wildcard,
expr => Self::Expr(expr), expr => Self::Expr(expr),
} }
} }


@ -18,13 +18,17 @@
#[cfg(not(feature = "std"))] #[cfg(not(feature = "std"))]
use alloc::{boxed::Box, vec::Vec}; use alloc::{boxed::Box, vec::Vec};
use helpers::attached_token::AttachedToken;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[cfg(feature = "visitor")] #[cfg(feature = "visitor")]
use sqlparser_derive::{Visit, VisitMut}; use sqlparser_derive::{Visit, VisitMut};
use crate::ast::*; use crate::{
ast::*,
tokenizer::{Token, TokenWithLocation},
};
/// The most complete variant of a `SELECT` query expression, optionally /// The most complete variant of a `SELECT` query expression, optionally
/// including `WITH`, `UNION` / other set operations, and `ORDER BY`. /// including `WITH`, `UNION` / other set operations, and `ORDER BY`.
@ -276,6 +280,8 @@ impl fmt::Display for Table {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Select { pub struct Select {
/// Token for the `SELECT` keyword
pub select_token: AttachedToken,
pub distinct: Option<Distinct>, pub distinct: Option<Distinct>,
/// MSSQL syntax: `TOP (<N>) [ PERCENT ] [ WITH TIES ]` /// MSSQL syntax: `TOP (<N>) [ PERCENT ] [ WITH TIES ]`
pub top: Option<Top>, pub top: Option<Top>,
@ -505,6 +511,8 @@ impl fmt::Display for NamedWindowDefinition {
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct With { pub struct With {
// Token for the "WITH" keyword
pub with_token: AttachedToken,
pub recursive: bool, pub recursive: bool,
pub cte_tables: Vec<Cte>, pub cte_tables: Vec<Cte>,
} }
@ -556,6 +564,8 @@ pub struct Cte {
pub query: Box<Query>, pub query: Box<Query>,
pub from: Option<Ident>, pub from: Option<Ident>,
pub materialized: Option<CteAsMaterialized>, pub materialized: Option<CteAsMaterialized>,
// Token for the closing parenthesis
pub closing_paren_token: AttachedToken,
} }
impl fmt::Display for Cte { impl fmt::Display for Cte {
@ -607,10 +617,12 @@ impl fmt::Display for IdentWithAlias {
} }
/// Additional options for wildcards, e.g. Snowflake `EXCLUDE`/`RENAME` and Bigquery `EXCEPT`. /// Additional options for wildcards, e.g. Snowflake `EXCLUDE`/`RENAME` and Bigquery `EXCEPT`.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct WildcardAdditionalOptions { pub struct WildcardAdditionalOptions {
/// The wildcard token `*`
pub wildcard_token: AttachedToken,
/// `[ILIKE...]`. /// `[ILIKE...]`.
/// Snowflake syntax: <https://docs.snowflake.com/en/sql-reference/sql/select#parameters> /// Snowflake syntax: <https://docs.snowflake.com/en/sql-reference/sql/select#parameters>
pub opt_ilike: Option<IlikeSelectItem>, pub opt_ilike: Option<IlikeSelectItem>,
@ -628,6 +640,19 @@ pub struct WildcardAdditionalOptions {
pub opt_rename: Option<RenameSelectItem>, pub opt_rename: Option<RenameSelectItem>,
} }
impl Default for WildcardAdditionalOptions {
fn default() -> Self {
Self {
wildcard_token: TokenWithLocation::wrap(Token::Mul).into(),
opt_ilike: None,
opt_exclude: None,
opt_except: None,
opt_replace: None,
opt_rename: None,
}
}
}
impl fmt::Display for WildcardAdditionalOptions { impl fmt::Display for WildcardAdditionalOptions {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(ilike) = &self.opt_ilike { if let Some(ilike) = &self.opt_ilike {

src/ast/spans.rs (new file, +2178 lines)

File diff suppressed because it is too large.


@ -24,6 +24,7 @@ use core::{
fmt::{self, Display}, fmt::{self, Display},
str::FromStr, str::FromStr,
}; };
use helpers::attached_token::AttachedToken;
use log::debug; use log::debug;
@ -371,7 +372,7 @@ impl<'a> Parser<'a> {
.into_iter() .into_iter()
.map(|token| TokenWithLocation { .map(|token| TokenWithLocation {
token, token,
location: Location { line: 0, column: 0 }, span: Span::empty(),
}) })
.collect(); .collect();
self.with_tokens_with_locations(tokens_with_locations) self.with_tokens_with_locations(tokens_with_locations)
@ -613,7 +614,7 @@ impl<'a> Parser<'a> {
let mut export = false; let mut export = false;
if !dialect_of!(self is MySqlDialect | GenericDialect) { if !dialect_of!(self is MySqlDialect | GenericDialect) {
return parser_err!("Unsupported statement FLUSH", self.peek_token().location); return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
} }
let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) { let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
@ -914,7 +915,7 @@ impl<'a> Parser<'a> {
t @ (Token::Word(_) | Token::SingleQuotedString(_)) => { t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
if self.peek_token().token == Token::Period { if self.peek_token().token == Token::Period {
let mut id_parts: Vec<Ident> = vec![match t { let mut id_parts: Vec<Ident> = vec![match t {
Token::Word(w) => w.to_ident(), Token::Word(w) => w.to_ident(next_token.span),
Token::SingleQuotedString(s) => Ident::with_quote('\'', s), Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
_ => unreachable!(), // We matched above _ => unreachable!(), // We matched above
}]; }];
@ -922,13 +923,16 @@ impl<'a> Parser<'a> {
while self.consume_token(&Token::Period) { while self.consume_token(&Token::Period) {
let next_token = self.next_token(); let next_token = self.next_token();
match next_token.token { match next_token.token {
Token::Word(w) => id_parts.push(w.to_ident()), Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
Token::SingleQuotedString(s) => { Token::SingleQuotedString(s) => {
// SQLite has single-quoted identifiers // SQLite has single-quoted identifiers
id_parts.push(Ident::with_quote('\'', s)) id_parts.push(Ident::with_quote('\'', s))
} }
Token::Mul => { Token::Mul => {
return Ok(Expr::QualifiedWildcard(ObjectName(id_parts))); return Ok(Expr::QualifiedWildcard(
ObjectName(id_parts),
AttachedToken(next_token),
));
} }
_ => { _ => {
return self return self
@ -939,7 +943,7 @@ impl<'a> Parser<'a> {
} }
} }
Token::Mul => { Token::Mul => {
return Ok(Expr::Wildcard); return Ok(Expr::Wildcard(AttachedToken(next_token)));
} }
_ => (), _ => (),
}; };
@ -1002,7 +1006,7 @@ impl<'a> Parser<'a> {
pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> { pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
let channel = if self.consume_token(&Token::Mul) { let channel = if self.consume_token(&Token::Mul) {
Ident::new(Expr::Wildcard.to_string()) Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
} else { } else {
match self.parse_identifier(false) { match self.parse_identifier(false) {
Ok(expr) => expr, Ok(expr) => expr,
@ -1030,6 +1034,7 @@ impl<'a> Parser<'a> {
fn parse_expr_prefix_by_reserved_word( fn parse_expr_prefix_by_reserved_word(
&mut self, &mut self,
w: &Word, w: &Word,
w_span: Span,
) -> Result<Option<Expr>, ParserError> { ) -> Result<Option<Expr>, ParserError> {
match w.keyword { match w.keyword {
Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => { Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
@ -1047,7 +1052,7 @@ impl<'a> Parser<'a> {
if dialect_of!(self is PostgreSqlDialect | GenericDialect) => if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
{ {
Ok(Some(Expr::Function(Function { Ok(Some(Expr::Function(Function {
name: ObjectName(vec![w.to_ident()]), name: ObjectName(vec![w.to_ident(w_span)]),
parameters: FunctionArguments::None, parameters: FunctionArguments::None,
args: FunctionArguments::None, args: FunctionArguments::None,
null_treatment: None, null_treatment: None,
@ -1061,7 +1066,7 @@ impl<'a> Parser<'a> {
| Keyword::CURRENT_DATE | Keyword::CURRENT_DATE
| Keyword::LOCALTIME | Keyword::LOCALTIME
| Keyword::LOCALTIMESTAMP => { | Keyword::LOCALTIMESTAMP => {
Ok(Some(self.parse_time_functions(ObjectName(vec![w.to_ident()]))?)) Ok(Some(self.parse_time_functions(ObjectName(vec![w.to_ident(w_span)]))?))
} }
Keyword::CASE => Ok(Some(self.parse_case_expr()?)), Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)), Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
@ -1086,7 +1091,7 @@ impl<'a> Parser<'a> {
Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)), Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)), Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
Keyword::POSITION if self.peek_token().token == Token::LParen => { Keyword::POSITION if self.peek_token().token == Token::LParen => {
Ok(Some(self.parse_position_expr(w.to_ident())?)) Ok(Some(self.parse_position_expr(w.to_ident(w_span))?))
} }
Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)), Keyword::SUBSTRING => Ok(Some(self.parse_substring_expr()?)),
Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)), Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
@ -1105,7 +1110,7 @@ impl<'a> Parser<'a> {
let query = self.parse_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?;
Ok(Some(Expr::Function(Function { Ok(Some(Expr::Function(Function {
name: ObjectName(vec![w.to_ident()]), name: ObjectName(vec![w.to_ident(w_span)]),
parameters: FunctionArguments::None, parameters: FunctionArguments::None,
args: FunctionArguments::Subquery(query), args: FunctionArguments::Subquery(query),
filter: None, filter: None,
@ -1134,20 +1139,24 @@ impl<'a> Parser<'a> {
} }
// Tries to parse an expression by a word that is not known to have a special meaning in the dialect. // Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
fn parse_expr_prefix_by_unreserved_word(&mut self, w: &Word) -> Result<Expr, ParserError> { fn parse_expr_prefix_by_unreserved_word(
&mut self,
w: &Word,
w_span: Span,
) -> Result<Expr, ParserError> {
match self.peek_token().token { match self.peek_token().token {
Token::LParen | Token::Period => { Token::LParen | Token::Period => {
let mut id_parts: Vec<Ident> = vec![w.to_ident()]; let mut id_parts: Vec<Ident> = vec![w.to_ident(w_span)];
let mut ends_with_wildcard = false; let mut ending_wildcard: Option<TokenWithLocation> = None;
while self.consume_token(&Token::Period) { while self.consume_token(&Token::Period) {
let next_token = self.next_token(); let next_token = self.next_token();
match next_token.token { match next_token.token {
Token::Word(w) => id_parts.push(w.to_ident()), Token::Word(w) => id_parts.push(w.to_ident(next_token.span)),
Token::Mul => { Token::Mul => {
// Postgres explicitly allows funcnm(tablenm.*) and the // Postgres explicitly allows funcnm(tablenm.*) and the
// function array_agg traverses this control flow // function array_agg traverses this control flow
if dialect_of!(self is PostgreSqlDialect) { if dialect_of!(self is PostgreSqlDialect) {
ends_with_wildcard = true; ending_wildcard = Some(next_token);
break; break;
} else { } else {
return self.expected("an identifier after '.'", next_token); return self.expected("an identifier after '.'", next_token);
@ -1160,8 +1169,11 @@ impl<'a> Parser<'a> {
} }
} }
if ends_with_wildcard { if let Some(wildcard_token) = ending_wildcard {
Ok(Expr::QualifiedWildcard(ObjectName(id_parts))) Ok(Expr::QualifiedWildcard(
ObjectName(id_parts),
AttachedToken(wildcard_token),
))
} else if self.consume_token(&Token::LParen) { } else if self.consume_token(&Token::LParen) {
if dialect_of!(self is SnowflakeDialect | MsSqlDialect) if dialect_of!(self is SnowflakeDialect | MsSqlDialect)
&& self.consume_tokens(&[Token::Plus, Token::RParen]) && self.consume_tokens(&[Token::Plus, Token::RParen])
@ -1194,11 +1206,11 @@ impl<'a> Parser<'a> {
Token::Arrow if self.dialect.supports_lambda_functions() => { Token::Arrow if self.dialect.supports_lambda_functions() => {
self.expect_token(&Token::Arrow)?; self.expect_token(&Token::Arrow)?;
Ok(Expr::Lambda(LambdaFunction { Ok(Expr::Lambda(LambdaFunction {
params: OneOrManyWithParens::One(w.to_ident()), params: OneOrManyWithParens::One(w.to_ident(w_span)),
body: Box::new(self.parse_expr()?), body: Box::new(self.parse_expr()?),
})) }))
} }
_ => Ok(Expr::Identifier(w.to_ident())), _ => Ok(Expr::Identifier(w.to_ident(w_span))),
} }
} }
@ -1225,7 +1237,7 @@ impl<'a> Parser<'a> {
// Note also that naively `SELECT date` looks like a syntax error because the `date` type // Note also that naively `SELECT date` looks like a syntax error because the `date` type
// name is not followed by a string literal, but in fact in PostgreSQL it is a valid // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
// expression that should parse as the column name "date". // expression that should parse as the column name "date".
let loc = self.peek_token().location; let loc = self.peek_token().span.start;
let opt_expr = self.maybe_parse(|parser| { let opt_expr = self.maybe_parse(|parser| {
match parser.parse_data_type()? { match parser.parse_data_type()? {
DataType::Interval => parser.parse_interval(), DataType::Interval => parser.parse_interval(),
@ -1259,12 +1271,14 @@ impl<'a> Parser<'a> {
// //
// We first try to parse the word and following tokens as a special expression, and if that fails, // We first try to parse the word and following tokens as a special expression, and if that fails,
// we rollback and try to parse it as an identifier. // we rollback and try to parse it as an identifier.
match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w)) { match self.try_parse(|parser| {
parser.parse_expr_prefix_by_reserved_word(&w, next_token.span)
}) {
// This word indicated an expression prefix and parsing was successful // This word indicated an expression prefix and parsing was successful
Ok(Some(expr)) => Ok(expr), Ok(Some(expr)) => Ok(expr),
// No expression prefix associated with this word // No expression prefix associated with this word
Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w)?), Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, next_token.span)?),
// If parsing of the word as a special expression failed, we are facing two options: // If parsing of the word as a special expression failed, we are facing two options:
// 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`) // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
@ -1275,7 +1289,7 @@ impl<'a> Parser<'a> {
Err(e) => { Err(e) => {
if !self.dialect.is_reserved_for_identifier(w.keyword) { if !self.dialect.is_reserved_for_identifier(w.keyword) {
if let Ok(Some(expr)) = self.maybe_parse(|parser| { if let Ok(Some(expr)) = self.maybe_parse(|parser| {
parser.parse_expr_prefix_by_unreserved_word(&w) parser.parse_expr_prefix_by_unreserved_word(&w, next_token.span)
}) { }) {
return Ok(expr); return Ok(expr);
} }
@ -1377,11 +1391,11 @@ impl<'a> Parser<'a> {
} else { } else {
let tok = self.next_token(); let tok = self.next_token();
let key = match tok.token { let key = match tok.token {
Token::Word(word) => word.to_ident(), Token::Word(word) => word.to_ident(tok.span),
_ => { _ => {
return parser_err!( return parser_err!(
format!("Expected identifier, found: {tok}"), format!("Expected identifier, found: {tok}"),
tok.location tok.span.start
) )
} }
}; };
@ -1471,7 +1485,7 @@ impl<'a> Parser<'a> {
while p.consume_token(&Token::Period) { while p.consume_token(&Token::Period) {
let tok = p.next_token(); let tok = p.next_token();
let name = match tok.token { let name = match tok.token {
Token::Word(word) => word.to_ident(), Token::Word(word) => word.to_ident(tok.span),
_ => return p.expected("identifier", tok), _ => return p.expected("identifier", tok),
}; };
let func = match p.parse_function(ObjectName(vec![name]))? { let func = match p.parse_function(ObjectName(vec![name]))? {
@ -2290,7 +2304,7 @@ impl<'a> Parser<'a> {
} else if self.dialect.require_interval_qualifier() { } else if self.dialect.require_interval_qualifier() {
return parser_err!( return parser_err!(
"INTERVAL requires a unit after the literal value", "INTERVAL requires a unit after the literal value",
self.peek_token().location self.peek_token().span.start
); );
} else { } else {
None None
@ -2381,7 +2395,10 @@ impl<'a> Parser<'a> {
let (fields, trailing_bracket) = let (fields, trailing_bracket) =
self.parse_struct_type_def(Self::parse_struct_field_def)?; self.parse_struct_type_def(Self::parse_struct_field_def)?;
if trailing_bracket.0 { if trailing_bracket.0 {
return parser_err!("unmatched > in STRUCT literal", self.peek_token().location); return parser_err!(
"unmatched > in STRUCT literal",
self.peek_token().span.start
);
} }
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
@ -2411,7 +2428,7 @@ impl<'a> Parser<'a> {
if typed_syntax { if typed_syntax {
return parser_err!("Typed syntax does not allow AS", { return parser_err!("Typed syntax does not allow AS", {
self.prev_token(); self.prev_token();
self.peek_token().location self.peek_token().span.start
}); });
} }
let field_name = self.parse_identifier(false)?; let field_name = self.parse_identifier(false)?;
@ -2464,7 +2481,7 @@ impl<'a> Parser<'a> {
// we've matched all field types for the current struct. // we've matched all field types for the current struct.
// e.g. this is invalid syntax `STRUCT<STRUCT<INT>>>, INT>(NULL)` // e.g. this is invalid syntax `STRUCT<STRUCT<INT>>>, INT>(NULL)`
if trailing_bracket.0 { if trailing_bracket.0 {
return parser_err!("unmatched > in STRUCT definition", start_token.location); return parser_err!("unmatched > in STRUCT definition", start_token.span.start);
} }
}; };
@ -2833,7 +2850,7 @@ impl<'a> Parser<'a> {
format!( format!(
"Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}" "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}"
), ),
tok.location tok.span.start
); );
}; };
@ -2959,7 +2976,7 @@ impl<'a> Parser<'a> {
// Can only happen if `get_next_precedence` got out of sync with this function // Can only happen if `get_next_precedence` got out of sync with this function
_ => parser_err!( _ => parser_err!(
format!("No infix parser for token {:?}", tok.token), format!("No infix parser for token {:?}", tok.token),
tok.location tok.span.start
), ),
} }
} else if Token::DoubleColon == tok { } else if Token::DoubleColon == tok {
@ -2990,7 +3007,7 @@ impl<'a> Parser<'a> {
// Can only happen if `get_next_precedence` got out of sync with this function // Can only happen if `get_next_precedence` got out of sync with this function
parser_err!( parser_err!(
format!("No infix parser for token {:?}", tok.token), format!("No infix parser for token {:?}", tok.token),
tok.location tok.span.start
) )
} }
} }
@ -3298,14 +3315,14 @@ impl<'a> Parser<'a> {
index += 1; index += 1;
if let Some(TokenWithLocation { if let Some(TokenWithLocation {
token: Token::Whitespace(_), token: Token::Whitespace(_),
location: _, span: _,
}) = token }) = token
{ {
continue; continue;
} }
break token.cloned().unwrap_or(TokenWithLocation { break token.cloned().unwrap_or(TokenWithLocation {
token: Token::EOF, token: Token::EOF,
location: Location { line: 0, column: 0 }, span: Span::empty(),
}); });
}) })
} }
@ -3318,13 +3335,13 @@ impl<'a> Parser<'a> {
match self.tokens.get(index - 1) { match self.tokens.get(index - 1) {
Some(TokenWithLocation { Some(TokenWithLocation {
token: Token::Whitespace(_), token: Token::Whitespace(_),
location: _, span: _,
}) => continue, }) => continue,
non_whitespace => { non_whitespace => {
if n == 0 { if n == 0 {
return non_whitespace.cloned().unwrap_or(TokenWithLocation { return non_whitespace.cloned().unwrap_or(TokenWithLocation {
token: Token::EOF, token: Token::EOF,
location: Location { line: 0, column: 0 }, span: Span::empty(),
}); });
} }
n -= 1; n -= 1;
@ -3346,18 +3363,10 @@ impl<'a> Parser<'a> {
.cloned() .cloned()
.unwrap_or(TokenWithLocation { .unwrap_or(TokenWithLocation {
token: Token::EOF, token: Token::EOF,
location: Location { line: 0, column: 0 }, span: Span::empty(),
}) })
} }
/// Look for all of the expected keywords in sequence, without consuming them
fn peek_keyword(&mut self, expected: Keyword) -> bool {
let index = self.index;
let matched = self.parse_keyword(expected);
self.index = index;
matched
}
/// Look for all of the expected keywords in sequence, without consuming them /// Look for all of the expected keywords in sequence, without consuming them
fn peek_keywords(&mut self, expected: &[Keyword]) -> bool { fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
let index = self.index; let index = self.index;
@ -3375,7 +3384,7 @@ impl<'a> Parser<'a> {
match self.tokens.get(self.index - 1) { match self.tokens.get(self.index - 1) {
Some(TokenWithLocation { Some(TokenWithLocation {
token: Token::Whitespace(_), token: Token::Whitespace(_),
location: _, span: _,
}) => continue, }) => continue,
token => { token => {
return token return token
@ -3401,7 +3410,7 @@ impl<'a> Parser<'a> {
self.index -= 1; self.index -= 1;
if let Some(TokenWithLocation { if let Some(TokenWithLocation {
token: Token::Whitespace(_), token: Token::Whitespace(_),
location: _, span: _,
}) = self.tokens.get(self.index) }) = self.tokens.get(self.index)
{ {
continue; continue;
@ -3414,7 +3423,7 @@ impl<'a> Parser<'a> {
pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> { pub fn expected<T>(&self, expected: &str, found: TokenWithLocation) -> Result<T, ParserError> {
parser_err!( parser_err!(
format!("Expected: {expected}, found: {found}"), format!("Expected: {expected}, found: {found}"),
found.location found.span.start
) )
} }
@ -3422,15 +3431,22 @@ impl<'a> Parser<'a> {
/// true. Otherwise, no tokens are consumed and returns false. /// true. Otherwise, no tokens are consumed and returns false.
#[must_use] #[must_use]
pub fn parse_keyword(&mut self, expected: Keyword) -> bool { pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
self.parse_keyword_token(expected).is_some()
}
#[must_use]
pub fn parse_keyword_token(&mut self, expected: Keyword) -> Option<TokenWithLocation> {
match self.peek_token().token { match self.peek_token().token {
Token::Word(w) if expected == w.keyword => { Token::Word(w) if expected == w.keyword => Some(self.next_token()),
self.next_token(); _ => None,
true
}
_ => false,
} }
} }
#[must_use]
pub fn peek_keyword(&mut self, expected: Keyword) -> bool {
matches!(self.peek_token().token, Token::Word(w) if expected == w.keyword)
}
/// If the current token is the `expected` keyword followed by /// If the current token is the `expected` keyword followed by
/// specified tokens, consume them and returns true. /// specified tokens, consume them and returns true.
/// Otherwise, no tokens are consumed and returns false. /// Otherwise, no tokens are consumed and returns false.
@ -3508,9 +3524,9 @@ impl<'a> Parser<'a> {
/// If the current token is the `expected` keyword, consume the token. /// If the current token is the `expected` keyword, consume the token.
/// Otherwise, return an error. /// Otherwise, return an error.
pub fn expect_keyword(&mut self, expected: Keyword) -> Result<(), ParserError> { pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithLocation, ParserError> {
if self.parse_keyword(expected) { if let Some(token) = self.parse_keyword_token(expected) {
Ok(()) Ok(token)
} else { } else {
self.expected(format!("{:?}", &expected).as_str(), self.peek_token()) self.expected(format!("{:?}", &expected).as_str(), self.peek_token())
} }
@ -3552,9 +3568,9 @@ impl<'a> Parser<'a> {
} }
/// Bail out if the current token is not an expected keyword, or consume it if it is /// Bail out if the current token is not an expected keyword, or consume it if it is
pub fn expect_token(&mut self, expected: &Token) -> Result<(), ParserError> { pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithLocation, ParserError> {
if self.consume_token(expected) { if self.peek_token() == *expected {
Ok(()) Ok(self.next_token())
} else { } else {
self.expected(&expected.to_string(), self.peek_token()) self.expected(&expected.to_string(), self.peek_token())
} }
@ -3749,7 +3765,7 @@ impl<'a> Parser<'a> {
/// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed
/// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found. /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found.
pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> { pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
let loc = self.peek_token().location; let loc = self.peek_token().span.start;
let all = self.parse_keyword(Keyword::ALL); let all = self.parse_keyword(Keyword::ALL);
let distinct = self.parse_keyword(Keyword::DISTINCT); let distinct = self.parse_keyword(Keyword::DISTINCT);
if !distinct { if !distinct {
@ -4828,7 +4844,7 @@ impl<'a> Parser<'a> {
let loc = self let loc = self
.tokens .tokens
.get(self.index - 1) .get(self.index - 1)
.map_or(Location { line: 0, column: 0 }, |t| t.location); .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
match keyword { match keyword {
Keyword::AUTHORIZATION => { Keyword::AUTHORIZATION => {
if authorization_owner.is_some() { if authorization_owner.is_some() {
@ -5138,7 +5154,7 @@ impl<'a> Parser<'a> {
let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
let loc = self.peek_token().location; let loc = self.peek_token().span.start;
let cascade = self.parse_keyword(Keyword::CASCADE); let cascade = self.parse_keyword(Keyword::CASCADE);
let restrict = self.parse_keyword(Keyword::RESTRICT); let restrict = self.parse_keyword(Keyword::RESTRICT);
let purge = self.parse_keyword(Keyword::PURGE); let purge = self.parse_keyword(Keyword::PURGE);
@ -6029,7 +6045,7 @@ impl<'a> Parser<'a> {
let _ = self.consume_token(&Token::Eq); let _ = self.consume_token(&Token::Eq);
let next_token = self.next_token(); let next_token = self.next_token();
match next_token.token { match next_token.token {
Token::Number(s, _) => Some(Self::parse::<u32>(s, next_token.location)?), Token::Number(s, _) => Some(Self::parse::<u32>(s, next_token.span.start)?),
_ => self.expected("literal int", next_token)?, _ => self.expected("literal int", next_token)?,
} }
} else { } else {
@ -6818,7 +6834,7 @@ impl<'a> Parser<'a> {
"FULLTEXT or SPATIAL option without constraint name", "FULLTEXT or SPATIAL option without constraint name",
TokenWithLocation { TokenWithLocation {
token: Token::make_keyword(&name.to_string()), token: Token::make_keyword(&name.to_string()),
location: next_token.location, span: next_token.span,
}, },
); );
} }
@ -7527,7 +7543,7 @@ impl<'a> Parser<'a> {
Expr::Function(f) => Ok(Statement::Call(f)), Expr::Function(f) => Ok(Statement::Call(f)),
other => parser_err!( other => parser_err!(
format!("Expected a simple procedure call but found: {other}"), format!("Expected a simple procedure call but found: {other}"),
self.peek_token().location self.peek_token().span.start
), ),
} }
} else { } else {
@ -7731,7 +7747,7 @@ impl<'a> Parser<'a> {
let loc = self let loc = self
.tokens .tokens
.get(self.index - 1) .get(self.index - 1)
.map_or(Location { line: 0, column: 0 }, |t| t.location); .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
return parser_err!(format!("Expect a char, found {s:?}"), loc); return parser_err!(format!("Expect a char, found {s:?}"), loc);
} }
Ok(s.chars().next().unwrap()) Ok(s.chars().next().unwrap())
@ -7777,7 +7793,7 @@ impl<'a> Parser<'a> {
/// Parse a literal value (numbers, strings, date/time, booleans) /// Parse a literal value (numbers, strings, date/time, booleans)
pub fn parse_value(&mut self) -> Result<Value, ParserError> { pub fn parse_value(&mut self) -> Result<Value, ParserError> {
let next_token = self.next_token(); let next_token = self.next_token();
let location = next_token.location; let span = next_token.span;
match next_token.token { match next_token.token {
Token::Word(w) => match w.keyword { Token::Word(w) => match w.keyword {
Keyword::TRUE if self.dialect.supports_boolean_literals() => { Keyword::TRUE if self.dialect.supports_boolean_literals() => {
@ -7794,7 +7810,7 @@ impl<'a> Parser<'a> {
"A value?", "A value?",
TokenWithLocation { TokenWithLocation {
token: Token::Word(w), token: Token::Word(w),
location, span,
}, },
)?, )?,
}, },
@ -7802,14 +7818,14 @@ impl<'a> Parser<'a> {
"a concrete value", "a concrete value",
TokenWithLocation { TokenWithLocation {
token: Token::Word(w), token: Token::Word(w),
location, span,
}, },
), ),
}, },
// The call to n.parse() returns a bigdecimal when the // The call to n.parse() returns a bigdecimal when the
// bigdecimal feature is enabled, and is otherwise a no-op // bigdecimal feature is enabled, and is otherwise a no-op
// (i.e., it returns the input string). // (i.e., it returns the input string).
Token::Number(n, l) => Ok(Value::Number(Self::parse(n, location)?, l)), Token::Number(n, l) => Ok(Value::Number(Self::parse(n, span.start)?, l)),
Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
Token::TripleSingleQuotedString(ref s) => { Token::TripleSingleQuotedString(ref s) => {
@ -7853,7 +7869,7 @@ impl<'a> Parser<'a> {
// This because snowflake allows numbers as placeholders // This because snowflake allows numbers as placeholders
let next_token = self.next_token(); let next_token = self.next_token();
let ident = match next_token.token { let ident = match next_token.token {
Token::Word(w) => Ok(w.to_ident()), Token::Word(w) => Ok(w.to_ident(next_token.span)),
Token::Number(w, false) => Ok(Ident::new(w)), Token::Number(w, false) => Ok(Ident::new(w)),
_ => self.expected("placeholder", next_token), _ => self.expected("placeholder", next_token),
}?; }?;
@ -7864,7 +7880,7 @@ impl<'a> Parser<'a> {
"a value", "a value",
TokenWithLocation { TokenWithLocation {
token: unexpected, token: unexpected,
location, span,
}, },
), ),
} }
@ -7904,7 +7920,7 @@ impl<'a> Parser<'a> {
fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> { fn parse_introduced_string_value(&mut self) -> Result<Value, ParserError> {
let next_token = self.next_token(); let next_token = self.next_token();
let location = next_token.location; let span = next_token.span;
match next_token.token { match next_token.token {
Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())),
Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())),
@ -7913,7 +7929,7 @@ impl<'a> Parser<'a> {
"a string value", "a string value",
TokenWithLocation { TokenWithLocation {
token: unexpected, token: unexpected,
location, span,
}, },
), ),
} }
@ -7923,7 +7939,7 @@ impl<'a> Parser<'a> {
pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> { pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
let next_token = self.next_token(); let next_token = self.next_token();
match next_token.token { match next_token.token {
Token::Number(s, _) => Self::parse::<u64>(s, next_token.location), Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
_ => self.expected("literal int", next_token), _ => self.expected("literal int", next_token),
} }
} }
@ -8322,7 +8338,7 @@ impl<'a> Parser<'a> {
// (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword, // (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword,
// not an alias.) // not an alias.)
Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => { Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => {
Ok(Some(w.to_ident())) Ok(Some(w.to_ident(next_token.span)))
} }
// MSSQL supports single-quoted strings as aliases for columns // MSSQL supports single-quoted strings as aliases for columns
// We accept them as table aliases too, although MSSQL does not. // We accept them as table aliases too, although MSSQL does not.
@ -8392,7 +8408,7 @@ impl<'a> Parser<'a> {
_ => { _ => {
return parser_err!( return parser_err!(
"BUG: expected to match GroupBy modifier keyword", "BUG: expected to match GroupBy modifier keyword",
self.peek_token().location self.peek_token().span.start
) )
} }
}); });
@ -8455,6 +8471,7 @@ impl<'a> Parser<'a> {
.map(|value| Ident { .map(|value| Ident {
value: value.into(), value: value.into(),
quote_style: ident.quote_style, quote_style: ident.quote_style,
span: ident.span,
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
}) })
@ -8470,7 +8487,7 @@ impl<'a> Parser<'a> {
loop { loop {
match self.peek_token().token { match self.peek_token().token {
Token::Word(w) => { Token::Word(w) => {
idents.push(w.to_ident()); idents.push(w.to_ident(self.peek_token().span));
} }
Token::EOF | Token::Eq => break, Token::EOF | Token::Eq => break,
_ => {} _ => {}
@ -8523,8 +8540,9 @@ impl<'a> Parser<'a> {
let mut idents = vec![]; let mut idents = vec![];
// expecting at least one word for identifier // expecting at least one word for identifier
match self.next_token().token { let next_token = self.next_token();
Token::Word(w) => idents.push(w.to_ident()), match next_token.token {
Token::Word(w) => idents.push(w.to_ident(next_token.span)),
Token::EOF => { Token::EOF => {
return Err(ParserError::ParserError( return Err(ParserError::ParserError(
"Empty input when parsing identifier".to_string(), "Empty input when parsing identifier".to_string(),
@ -8541,19 +8559,22 @@ impl<'a> Parser<'a> {
loop { loop {
match self.next_token().token { match self.next_token().token {
// ensure that optional period is succeeded by another identifier // ensure that optional period is succeeded by another identifier
Token::Period => match self.next_token().token { Token::Period => {
Token::Word(w) => idents.push(w.to_ident()), let next_token = self.next_token();
Token::EOF => { match next_token.token {
return Err(ParserError::ParserError( Token::Word(w) => idents.push(w.to_ident(next_token.span)),
"Trailing period in identifier".to_string(), Token::EOF => {
))? return Err(ParserError::ParserError(
"Trailing period in identifier".to_string(),
))?
}
token => {
return Err(ParserError::ParserError(format!(
"Unexpected token following period in identifier: {token}"
)))?
}
} }
token => { }
return Err(ParserError::ParserError(format!(
"Unexpected token following period in identifier: {token}"
)))?
}
},
Token::EOF => break, Token::EOF => break,
token => { token => {
return Err(ParserError::ParserError(format!( return Err(ParserError::ParserError(format!(
@ -8575,7 +8596,7 @@ impl<'a> Parser<'a> {
let next_token = self.next_token(); let next_token = self.next_token();
match next_token.token { match next_token.token {
Token::Word(w) => { Token::Word(w) => {
let mut ident = w.to_ident(); let mut ident = w.to_ident(next_token.span);
// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or // On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or
// TABLE clause [0]. // TABLE clause [0].
@ -9006,8 +9027,9 @@ impl<'a> Parser<'a> {
/// expect the initial keyword to be already consumed /// expect the initial keyword to be already consumed
pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> { pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
let _guard = self.recursion_counter.try_decrease()?; let _guard = self.recursion_counter.try_decrease()?;
let with = if self.parse_keyword(Keyword::WITH) { let with = if let Some(with_token) = self.parse_keyword_token(Keyword::WITH) {
Some(With { Some(With {
with_token: with_token.into(),
recursive: self.parse_keyword(Keyword::RECURSIVE), recursive: self.parse_keyword(Keyword::RECURSIVE),
cte_tables: self.parse_comma_separated(Parser::parse_cte)?, cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
}) })
@ -9265,8 +9287,10 @@ impl<'a> Parser<'a> {
} }
} }
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; let closing_paren_token = self.expect_token(&Token::RParen)?;
let alias = TableAlias { let alias = TableAlias {
name, name,
columns: vec![], columns: vec![],
@ -9276,6 +9300,7 @@ impl<'a> Parser<'a> {
query, query,
from: None, from: None,
materialized: is_materialized, materialized: is_materialized,
closing_paren_token: closing_paren_token.into(),
} }
} else { } else {
let columns = self.parse_table_alias_column_defs()?; let columns = self.parse_table_alias_column_defs()?;
@ -9289,14 +9314,17 @@ impl<'a> Parser<'a> {
} }
} }
self.expect_token(&Token::LParen)?; self.expect_token(&Token::LParen)?;
let query = self.parse_query()?; let query = self.parse_query()?;
self.expect_token(&Token::RParen)?; let closing_paren_token = self.expect_token(&Token::RParen)?;
let alias = TableAlias { name, columns }; let alias = TableAlias { name, columns };
Cte { Cte {
alias, alias,
query, query,
from: None, from: None,
materialized: is_materialized, materialized: is_materialized,
closing_paren_token: closing_paren_token.into(),
} }
}; };
if self.parse_keyword(Keyword::FROM) { if self.parse_keyword(Keyword::FROM) {
@ -9316,7 +9344,7 @@ impl<'a> Parser<'a> {
pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> { pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
// We parse the expression using a Pratt parser, as in `parse_expr()`. // We parse the expression using a Pratt parser, as in `parse_expr()`.
// Start by parsing a restricted SELECT or a `(subquery)`: // Start by parsing a restricted SELECT or a `(subquery)`:
let expr = if self.parse_keyword(Keyword::SELECT) { let expr = if self.peek_keyword(Keyword::SELECT) {
SetExpr::Select(self.parse_select().map(Box::new)?) SetExpr::Select(self.parse_select().map(Box::new)?)
} else if self.consume_token(&Token::LParen) { } else if self.consume_token(&Token::LParen) {
// CTEs are not allowed here, but the parser currently accepts them // CTEs are not allowed here, but the parser currently accepts them
@ -9405,9 +9433,9 @@ impl<'a> Parser<'a> {
} }
} }
/// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`), /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
/// assuming the initial `SELECT` was already consumed
pub fn parse_select(&mut self) -> Result<Select, ParserError> { pub fn parse_select(&mut self) -> Result<Select, ParserError> {
let select_token = self.expect_keyword(Keyword::SELECT)?;
let value_table_mode = let value_table_mode =
if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) { if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) {
if self.parse_keyword(Keyword::VALUE) { if self.parse_keyword(Keyword::VALUE) {
@ -9571,6 +9599,7 @@ impl<'a> Parser<'a> {
}; };
Ok(Select { Ok(Select {
select_token: AttachedToken(select_token),
distinct, distinct,
top, top,
top_before_distinct, top_before_distinct,
@ -10656,7 +10685,7 @@ impl<'a> Parser<'a> {
return self.expected("literal number", next_token); return self.expected("literal number", next_token);
}; };
self.expect_token(&Token::RBrace)?; self.expect_token(&Token::RBrace)?;
RepetitionQuantifier::AtMost(Self::parse(n, token.location)?) RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
} }
Token::Number(n, _) if self.consume_token(&Token::Comma) => { Token::Number(n, _) if self.consume_token(&Token::Comma) => {
let next_token = self.next_token(); let next_token = self.next_token();
@ -10664,12 +10693,12 @@ impl<'a> Parser<'a> {
Token::Number(m, _) => { Token::Number(m, _) => {
self.expect_token(&Token::RBrace)?; self.expect_token(&Token::RBrace)?;
RepetitionQuantifier::Range( RepetitionQuantifier::Range(
Self::parse(n, token.location)?, Self::parse(n, token.span.start)?,
Self::parse(m, token.location)?, Self::parse(m, token.span.start)?,
) )
} }
Token::RBrace => { Token::RBrace => {
RepetitionQuantifier::AtLeast(Self::parse(n, token.location)?) RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
} }
_ => { _ => {
return self.expected("} or upper bound", next_token); return self.expected("} or upper bound", next_token);
@ -10678,7 +10707,7 @@ impl<'a> Parser<'a> {
} }
Token::Number(n, _) => { Token::Number(n, _) => {
self.expect_token(&Token::RBrace)?; self.expect_token(&Token::RBrace)?;
RepetitionQuantifier::Exactly(Self::parse(n, token.location)?) RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
} }
_ => return self.expected("quantifier range", token), _ => return self.expected("quantifier range", token),
} }
@ -11113,7 +11142,7 @@ impl<'a> Parser<'a> {
.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) .parse_keywords(&[Keyword::GRANTED, Keyword::BY])
.then(|| self.parse_identifier(false).unwrap()); .then(|| self.parse_identifier(false).unwrap());
let loc = self.peek_token().location; let loc = self.peek_token().span.start;
let cascade = self.parse_keyword(Keyword::CASCADE); let cascade = self.parse_keyword(Keyword::CASCADE);
let restrict = self.parse_keyword(Keyword::RESTRICT); let restrict = self.parse_keyword(Keyword::RESTRICT);
if cascade && restrict { if cascade && restrict {
@ -11132,7 +11161,10 @@ impl<'a> Parser<'a> {
/// Parse an REPLACE statement /// Parse an REPLACE statement
pub fn parse_replace(&mut self) -> Result<Statement, ParserError> { pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
if !dialect_of!(self is MySqlDialect | GenericDialect) { if !dialect_of!(self is MySqlDialect | GenericDialect) {
return parser_err!("Unsupported statement REPLACE", self.peek_token().location); return parser_err!(
"Unsupported statement REPLACE",
self.peek_token().span.start
);
} }
let mut insert = self.parse_insert()?; let mut insert = self.parse_insert()?;
@ -11593,7 +11625,7 @@ impl<'a> Parser<'a> {
} }
fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> { fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
let loc = self.peek_token().location; let loc = self.peek_token().span.start;
match ( match (
self.parse_keyword(Keyword::ALL), self.parse_keyword(Keyword::ALL),
self.parse_keyword(Keyword::DISTINCT), self.parse_keyword(Keyword::DISTINCT),
@ -11608,17 +11640,17 @@ impl<'a> Parser<'a> {
/// Parse a comma-delimited list of projections after SELECT /// Parse a comma-delimited list of projections after SELECT
pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> { pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
match self.parse_wildcard_expr()? { match self.parse_wildcard_expr()? {
Expr::QualifiedWildcard(prefix) => Ok(SelectItem::QualifiedWildcard( Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
prefix, prefix,
self.parse_wildcard_additional_options()?, self.parse_wildcard_additional_options(token.0)?,
)), )),
Expr::Wildcard => Ok(SelectItem::Wildcard( Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
self.parse_wildcard_additional_options()?, self.parse_wildcard_additional_options(token.0)?,
)), )),
Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => { Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
parser_err!( parser_err!(
format!("Expected an expression, found: {}", v), format!("Expected an expression, found: {}", v),
self.peek_token().location self.peek_token().span.start
) )
} }
Expr::BinaryOp { Expr::BinaryOp {
@ -11631,7 +11663,7 @@ impl<'a> Parser<'a> {
let Expr::Identifier(alias) = *left else { let Expr::Identifier(alias) = *left else {
return parser_err!( return parser_err!(
"BUG: expected identifier expression as alias", "BUG: expected identifier expression as alias",
self.peek_token().location self.peek_token().span.start
); );
}; };
Ok(SelectItem::ExprWithAlias { Ok(SelectItem::ExprWithAlias {
@ -11653,6 +11685,7 @@ impl<'a> Parser<'a> {
/// If it is not possible to parse it, this will return an option. /// If it is not possible to parse it, this will return an option.
pub fn parse_wildcard_additional_options( pub fn parse_wildcard_additional_options(
&mut self, &mut self,
wildcard_token: TokenWithLocation,
) -> Result<WildcardAdditionalOptions, ParserError> { ) -> Result<WildcardAdditionalOptions, ParserError> {
let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) { let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
self.parse_optional_select_item_ilike()? self.parse_optional_select_item_ilike()?
@ -11684,6 +11717,7 @@ impl<'a> Parser<'a> {
}; };
Ok(WildcardAdditionalOptions { Ok(WildcardAdditionalOptions {
wildcard_token: wildcard_token.into(),
opt_ilike, opt_ilike,
opt_exclude, opt_exclude,
opt_except, opt_except,
@ -11931,7 +11965,7 @@ impl<'a> Parser<'a> {
} else { } else {
let next_token = self.next_token(); let next_token = self.next_token();
let quantity = match next_token.token { let quantity = match next_token.token {
Token::Number(s, _) => Self::parse::<u64>(s, next_token.location)?, Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
_ => self.expected("literal int", next_token)?, _ => self.expected("literal int", next_token)?,
}; };
Some(TopQuantity::Constant(quantity)) Some(TopQuantity::Constant(quantity))
@ -12812,10 +12846,11 @@ impl<'a> Parser<'a> {
} }
impl Word { impl Word {
pub fn to_ident(&self) -> Ident { pub fn to_ident(&self, span: Span) -> Ident {
Ident { Ident {
value: self.value.clone(), value: self.value.clone(),
quote_style: self.quote_style, quote_style: self.quote_style,
span,
} }
} }
} }
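`Word::to_ident` now requires the caller to supply the span, typically taken from the surrounding `TokenWithLocation`. A minimal sketch of how calling code might adapt (the helper name is illustrative, not part of this diff):

```rust
use sqlparser::ast::Ident;
use sqlparser::tokenizer::{Token, TokenWithLocation};

// Hypothetical helper: turn a `Token::Word` into an `Ident`, carrying the
// token's span along so the resulting AST node stays located.
fn word_to_ident(t: &TokenWithLocation) -> Option<Ident> {
    match &t.token {
        // `to_ident` no longer defaults the span; the caller passes it in.
        Token::Word(w) => Some(w.to_ident(t.span)),
        _ => None,
    }
}
```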
@ -13389,14 +13424,17 @@ mod tests {
Ident { Ident {
value: "CATALOG".to_string(), value: "CATALOG".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "F(o)o. \"bar".to_string(), value: "F(o)o. \"bar".to_string(),
quote_style: Some('"'), quote_style: Some('"'),
span: Span::empty(),
}, },
Ident { Ident {
value: "table".to_string(), value: "table".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]; ];
dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| { dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
@ -13409,10 +13447,12 @@ mod tests {
Ident { Ident {
value: "CATALOG".to_string(), value: "CATALOG".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "table".to_string(), value: "table".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]; ];
dialect.run_parser_method("CATALOG . table", |parser| { dialect.run_parser_method("CATALOG . table", |parser| {
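Throughout the parser the old `token.location` accessor becomes `token.span.start`, so error reporting still points at the start of the offending token. A rough sketch of what a caller sees (the exact message wording is not guaranteed by this diff):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

// Missing table name after FROM: the parser rejects the WHERE keyword and
// reports the failing token's start location (now `token.span.start`).
let err = Parser::parse_sql(&GenericDialect, "SELECT 1 FROM WHERE x = 1").unwrap_err();
// Typically renders along the lines of
//   "Expected: identifier, found: WHERE at Line: 1, Column: 15"
// (exact wording and column may differ between versions).
println!("{err}");
```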

View file

@ -29,10 +29,10 @@ use alloc::{
vec, vec,
vec::Vec, vec::Vec,
}; };
use core::fmt;
use core::iter::Peekable; use core::iter::Peekable;
use core::num::NonZeroU8; use core::num::NonZeroU8;
use core::str::Chars; use core::str::Chars;
use core::{cmp, fmt};
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -422,7 +422,9 @@ impl fmt::Display for Whitespace {
} }
/// Location in input string /// Location in input string
#[derive(Debug, Eq, PartialEq, Clone, Copy)] #[derive(Eq, PartialEq, Hash, Clone, Copy, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Location { pub struct Location {
/// Line number, starting from 1 /// Line number, starting from 1
pub line: u64, pub line: u64,
@ -431,36 +433,114 @@ pub struct Location {
} }
impl fmt::Display for Location { impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.line == 0 { if self.line == 0 {
return Ok(()); return Ok(());
} }
write!( write!(f, " at Line: {}, Column: {}", self.line, self.column)
f, }
// TODO: use standard compiler location syntax (<path>:<line>:<col>) }
" at Line: {}, Column: {}",
self.line, self.column, impl fmt::Debug for Location {
) fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Location({},{})", self.line, self.column)
}
}
impl Location {
pub fn of(line: u64, column: u64) -> Self {
Self { line, column }
}
pub fn span_to(self, end: Self) -> Span {
Span { start: self, end }
}
}
impl From<(u64, u64)> for Location {
fn from((line, column): (u64, u64)) -> Self {
Self { line, column }
}
}
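`Location::of`, `span_to`, and the tuple conversion are small constructors meant for building spans by hand, for example in tests. A quick sketch of how they compose:

```rust
use sqlparser::tokenizer::{Location, Span};

let start = Location::of(1, 1);     // line 1, column 1
let end: Location = (1, 16).into(); // tuple conversion shown above
let span = start.span_to(end);
assert_eq!(span, Span::new(start, end));
```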
/// A span of source code locations (start, end)
#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct Span {
pub start: Location,
pub end: Location,
}
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Span({:?}..{:?})", self.start, self.end)
}
}
impl Span {
// An empty span (0, 0) -> (0, 0)
// We need a const instance for pattern matching
const EMPTY: Span = Self::empty();
pub fn new(start: Location, end: Location) -> Span {
Span { start, end }
}
/// Returns an empty span (0, 0) -> (0, 0)
/// Empty spans represent no knowledge of source location
pub const fn empty() -> Span {
Span {
start: Location { line: 0, column: 0 },
end: Location { line: 0, column: 0 },
}
}
/// Returns the smallest Span that contains both `self` and `other`
/// If either span is [Span::empty], the other span is returned
pub fn union(&self, other: &Span) -> Span {
// If either span is empty, return the other
// this prevents propagating (0, 0) through the tree
match (self, other) {
(&Span::EMPTY, _) => *other,
(_, &Span::EMPTY) => *self,
_ => Span {
start: cmp::min(self.start, other.start),
end: cmp::max(self.end, other.end),
},
}
}
/// Same as [Span::union] for `Option<Span>`
/// If `other` is `None`, `self` is returned
pub fn union_opt(&self, other: &Option<Span>) -> Span {
match other {
Some(other) => self.union(other),
None => *self,
}
} }
} }
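Since `Span::empty()` stands for "no location information", `union` deliberately skips empty inputs instead of dragging results toward `(0, 0)`. A short sketch of that behavior:

```rust
use sqlparser::tokenizer::{Location, Span};

let a = Span::new(Location::of(1, 1), Location::of(1, 7));
let b = Span::new(Location::of(1, 9), Location::of(2, 3));

// The union covers both spans.
assert_eq!(a.union(&b), Span::new(Location::of(1, 1), Location::of(2, 3)));

// Empty spans are treated as "unknown" and do not shrink the result to (0, 0).
assert_eq!(a.union(&Span::empty()), a);
assert_eq!(a.union_opt(&None), a);
```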
/// A [Token] with [Location] attached to it /// A [Token] with [Location] attached to it
#[derive(Debug, Eq, PartialEq, Clone)] #[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub struct TokenWithLocation { pub struct TokenWithLocation {
pub token: Token, pub token: Token,
pub location: Location, pub span: Span,
} }
impl TokenWithLocation { impl TokenWithLocation {
pub fn new(token: Token, line: u64, column: u64) -> TokenWithLocation { pub fn new(token: Token, span: Span) -> TokenWithLocation {
TokenWithLocation { TokenWithLocation { token, span }
token,
location: Location { line, column },
}
} }
pub fn wrap(token: Token) -> TokenWithLocation { pub fn wrap(token: Token) -> TokenWithLocation {
TokenWithLocation::new(token, 0, 0) TokenWithLocation::new(token, Span::empty())
}
pub fn at(token: Token, start: Location, end: Location) -> TokenWithLocation {
TokenWithLocation::new(token, Span::new(start, end))
} }
} }
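`TokenWithLocation` now carries a full `Span`, and the constructors above cover the common cases: a known start and end (`at`), or no location information at all (`wrap`). For example:

```rust
use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation};

// Full precision: both endpoints known.
let t1 = TokenWithLocation::at(Token::Comma, Location::of(1, 9), Location::of(1, 10));
assert_eq!(t1.span, Span::new(Location::of(1, 9), Location::of(1, 10)));

// No location information: wraps the token with an empty span.
let t2 = TokenWithLocation::wrap(Token::Comma);
assert_eq!(t2.span, Span::empty());
```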
@ -656,7 +736,9 @@ impl<'a> Tokenizer<'a> {
let mut location = state.location(); let mut location = state.location();
while let Some(token) = self.next_token(&mut state)? { while let Some(token) = self.next_token(&mut state)? {
buf.push(TokenWithLocation { token, location }); let span = location.span_to(state.location());
buf.push(TokenWithLocation { token, span });
location = state.location(); location = state.location();
} }
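Each token's span is taken from the tokenizer state before and after reading it, so multi-character tokens and whitespace report both endpoints; the updated test below spells out the expected values. A condensed usage sketch, assuming the rest of the tokenizer API is unchanged:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::Tokenizer;

let dialect = GenericDialect {};
let tokens = Tokenizer::new(&dialect, "SELECT a,\n b")
    .tokenize_with_location()
    .unwrap();
for t in &tokens {
    // e.g. `SELECT` spans (1,1)..(1,7), the newline spans (1,10)..(2,1)
    println!("{:?} -> {:?}", t.token, t.span);
}
```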
@ -2669,18 +2751,30 @@ mod tests {
.tokenize_with_location() .tokenize_with_location()
.unwrap(); .unwrap();
let expected = vec![ let expected = vec![
TokenWithLocation::new(Token::make_keyword("SELECT"), 1, 1), TokenWithLocation::at(Token::make_keyword("SELECT"), (1, 1).into(), (1, 7).into()),
TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 1, 7), TokenWithLocation::at(
TokenWithLocation::new(Token::make_word("a", None), 1, 8), Token::Whitespace(Whitespace::Space),
TokenWithLocation::new(Token::Comma, 1, 9), (1, 7).into(),
TokenWithLocation::new(Token::Whitespace(Whitespace::Newline), 1, 10), (1, 8).into(),
TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 2, 1), ),
TokenWithLocation::new(Token::make_word("b", None), 2, 2), TokenWithLocation::at(Token::make_word("a", None), (1, 8).into(), (1, 9).into()),
TokenWithLocation::at(Token::Comma, (1, 9).into(), (1, 10).into()),
TokenWithLocation::at(
Token::Whitespace(Whitespace::Newline),
(1, 10).into(),
(2, 1).into(),
),
TokenWithLocation::at(
Token::Whitespace(Whitespace::Space),
(2, 1).into(),
(2, 2).into(),
),
TokenWithLocation::at(Token::make_word("b", None), (2, 2).into(), (2, 3).into()),
]; ];
compare(expected, tokens); compare(expected, tokens);
} }
fn compare<T: PartialEq + std::fmt::Debug>(expected: Vec<T>, actual: Vec<T>) { fn compare<T: PartialEq + fmt::Debug>(expected: Vec<T>, actual: Vec<T>) {
//println!("------------------------------"); //println!("------------------------------");
//println!("tokens = {:?}", actual); //println!("tokens = {:?}", actual);
//println!("expected = {:?}", expected); //println!("expected = {:?}", expected);

View file

@ -23,6 +23,7 @@ use std::ops::Deref;
use sqlparser::ast::*; use sqlparser::ast::*;
use sqlparser::dialect::{BigQueryDialect, GenericDialect}; use sqlparser::dialect::{BigQueryDialect, GenericDialect};
use sqlparser::parser::{ParserError, ParserOptions}; use sqlparser::parser::{ParserError, ParserOptions};
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
#[test] #[test]
@ -678,10 +679,12 @@ fn parse_typed_struct_syntax_bigquery() {
Ident { Ident {
value: "t".into(), value: "t".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "str_col".into(), value: "str_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]), ]),
], ],
@ -690,6 +693,7 @@ fn parse_typed_struct_syntax_bigquery() {
field_name: Some(Ident { field_name: Some(Ident {
value: "x".into(), value: "x".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
field_type: DataType::Int64 field_type: DataType::Int64
}, },
@ -697,6 +701,7 @@ fn parse_typed_struct_syntax_bigquery() {
field_name: Some(Ident { field_name: Some(Ident {
value: "y".into(), value: "y".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
field_type: DataType::String(None) field_type: DataType::String(None)
}, },
@ -709,6 +714,7 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::Identifier(Ident { values: vec![Expr::Identifier(Ident {
value: "nested_col".into(), value: "nested_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}),], }),],
fields: vec![ fields: vec![
StructField { StructField {
@ -740,6 +746,7 @@ fn parse_typed_struct_syntax_bigquery() {
values: vec![Expr::Identifier(Ident { values: vec![Expr::Identifier(Ident {
value: "nested_col".into(), value: "nested_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}),], }),],
fields: vec![ fields: vec![
StructField { StructField {
@ -987,10 +994,12 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
Ident { Ident {
value: "t".into(), value: "t".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "str_col".into(), value: "str_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]), ]),
], ],
@ -999,6 +1008,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
field_name: Some(Ident { field_name: Some(Ident {
value: "x".into(), value: "x".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
field_type: DataType::Int64 field_type: DataType::Int64
}, },
@ -1006,6 +1016,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
field_name: Some(Ident { field_name: Some(Ident {
value: "y".into(), value: "y".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
field_type: DataType::String(None) field_type: DataType::String(None)
}, },
@ -1018,6 +1029,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::Identifier(Ident { values: vec![Expr::Identifier(Ident {
value: "nested_col".into(), value: "nested_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}),], }),],
fields: vec![ fields: vec![
StructField { StructField {
@ -1049,6 +1061,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() {
values: vec![Expr::Identifier(Ident { values: vec![Expr::Identifier(Ident {
value: "nested_col".into(), value: "nested_col".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}),], }),],
fields: vec![ fields: vec![
StructField { StructField {

View file

@ -21,6 +21,8 @@
#[macro_use] #[macro_use]
mod test_utils; mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
use sqlparser::ast::Expr::{BinaryOp, Identifier, MapAccess}; use sqlparser::ast::Expr::{BinaryOp, Identifier, MapAccess};
@ -39,12 +41,14 @@ fn parse_map_access_expr() {
assert_eq!( assert_eq!(
Select { Select {
distinct: None, distinct: None,
select_token: AttachedToken::empty(),
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![UnnamedExpr(MapAccess { projection: vec![UnnamedExpr(MapAccess {
column: Box::new(Identifier(Ident { column: Box::new(Identifier(Ident {
value: "string_values".to_string(), value: "string_values".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
keys: vec![MapAccessKey { keys: vec![MapAccessKey {
key: call( key: call(
@ -903,7 +907,8 @@ fn parse_create_view_with_fields_data_types() {
data_type: Some(DataType::Custom( data_type: Some(DataType::Custom(
ObjectName(vec![Ident { ObjectName(vec![Ident {
value: "int".into(), value: "int".into(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}]), }]),
vec![] vec![]
)), )),
@ -914,7 +919,8 @@ fn parse_create_view_with_fields_data_types() {
data_type: Some(DataType::Custom( data_type: Some(DataType::Custom(
ObjectName(vec![Ident { ObjectName(vec![Ident {
value: "String".into(), value: "String".into(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}]), }]),
vec![] vec![]
)), )),

View file

@ -25,6 +25,7 @@
extern crate core; extern crate core;
use helpers::attached_token::AttachedToken;
use matches::assert_matches; use matches::assert_matches;
use sqlparser::ast::SelectItem::UnnamedExpr; use sqlparser::ast::SelectItem::UnnamedExpr;
use sqlparser::ast::TableFactor::{Pivot, Unpivot}; use sqlparser::ast::TableFactor::{Pivot, Unpivot};
@ -36,6 +37,7 @@ use sqlparser::dialect::{
}; };
use sqlparser::keywords::{Keyword, ALL_KEYWORDS}; use sqlparser::keywords::{Keyword, ALL_KEYWORDS};
use sqlparser::parser::{Parser, ParserError, ParserOptions}; use sqlparser::parser::{Parser, ParserError, ParserOptions};
use sqlparser::tokenizer::Span;
use sqlparser::tokenizer::Tokenizer; use sqlparser::tokenizer::Tokenizer;
use test_utils::{ use test_utils::{
all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, expr_from_projection, all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, expr_from_projection,
@ -378,6 +380,7 @@ fn parse_update_set_from() {
subquery: Box::new(Query { subquery: Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -1271,6 +1274,7 @@ fn parse_select_with_date_column_name() {
&Expr::Identifier(Ident { &Expr::Identifier(Ident {
value: "date".into(), value: "date".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
expr_from_projection(only(&select.projection)), expr_from_projection(only(&select.projection)),
); );
@ -1789,6 +1793,7 @@ fn parse_null_like() {
alias: Ident { alias: Ident {
value: "col_null".to_owned(), value: "col_null".to_owned(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
}, },
select.projection[0] select.projection[0]
@ -1805,6 +1810,7 @@ fn parse_null_like() {
alias: Ident { alias: Ident {
value: "null_col".to_owned(), value: "null_col".to_owned(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
}, },
select.projection[1] select.projection[1]
@ -2823,6 +2829,7 @@ fn parse_listagg() {
expr: Expr::Identifier(Ident { expr: Expr::Identifier(Ident {
value: "id".to_string(), value: "id".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
asc: None, asc: None,
nulls_first: None, nulls_first: None,
@ -2832,6 +2839,7 @@ fn parse_listagg() {
expr: Expr::Identifier(Ident { expr: Expr::Identifier(Ident {
value: "username".to_string(), value: "username".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
asc: None, asc: None,
nulls_first: None, nulls_first: None,
@ -4038,7 +4046,8 @@ fn parse_alter_table() {
[SqlOption::KeyValue { [SqlOption::KeyValue {
key: Ident { key: Ident {
value: "classification".to_string(), value: "classification".to_string(),
quote_style: Some('\'') quote_style: Some('\''),
span: Span::empty(),
}, },
value: Expr::Value(Value::SingleQuotedString("parquet".to_string())), value: Expr::Value(Value::SingleQuotedString("parquet".to_string())),
}], }],
@ -4824,6 +4833,7 @@ fn test_parse_named_window() {
ORDER BY C3"; ORDER BY C3";
let actual_select_only = verified_only_select(sql); let actual_select_only = verified_only_select(sql);
let expected = Select { let expected = Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -4833,6 +4843,7 @@ fn test_parse_named_window() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "MIN".to_string(), value: "MIN".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
parameters: FunctionArguments::None, parameters: FunctionArguments::None,
args: FunctionArguments::List(FunctionArgumentList { args: FunctionArguments::List(FunctionArgumentList {
@ -4841,6 +4852,7 @@ fn test_parse_named_window() {
Expr::Identifier(Ident { Expr::Identifier(Ident {
value: "c12".to_string(), value: "c12".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
))], ))],
clauses: vec![], clauses: vec![],
@ -4850,12 +4862,14 @@ fn test_parse_named_window() {
over: Some(WindowType::NamedWindow(Ident { over: Some(WindowType::NamedWindow(Ident {
value: "window1".to_string(), value: "window1".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
within_group: vec![], within_group: vec![],
}), }),
alias: Ident { alias: Ident {
value: "min1".to_string(), value: "min1".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
}, },
SelectItem::ExprWithAlias { SelectItem::ExprWithAlias {
@ -4863,6 +4877,7 @@ fn test_parse_named_window() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "MAX".to_string(), value: "MAX".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
parameters: FunctionArguments::None, parameters: FunctionArguments::None,
args: FunctionArguments::List(FunctionArgumentList { args: FunctionArguments::List(FunctionArgumentList {
@ -4871,6 +4886,7 @@ fn test_parse_named_window() {
Expr::Identifier(Ident { Expr::Identifier(Ident {
value: "c12".to_string(), value: "c12".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
))], ))],
clauses: vec![], clauses: vec![],
@ -4880,12 +4896,14 @@ fn test_parse_named_window() {
over: Some(WindowType::NamedWindow(Ident { over: Some(WindowType::NamedWindow(Ident {
value: "window2".to_string(), value: "window2".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
within_group: vec![], within_group: vec![],
}), }),
alias: Ident { alias: Ident {
value: "max1".to_string(), value: "max1".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
}, },
], ],
@ -4895,6 +4913,7 @@ fn test_parse_named_window() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "aggregate_test_100".to_string(), value: "aggregate_test_100".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -4919,6 +4938,7 @@ fn test_parse_named_window() {
Ident { Ident {
value: "window1".to_string(), value: "window1".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
NamedWindowExpr::WindowSpec(WindowSpec { NamedWindowExpr::WindowSpec(WindowSpec {
window_name: None, window_name: None,
@ -4927,6 +4947,7 @@ fn test_parse_named_window() {
expr: Expr::Identifier(Ident { expr: Expr::Identifier(Ident {
value: "C12".to_string(), value: "C12".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
asc: None, asc: None,
nulls_first: None, nulls_first: None,
@ -4939,12 +4960,14 @@ fn test_parse_named_window() {
Ident { Ident {
value: "window2".to_string(), value: "window2".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
NamedWindowExpr::WindowSpec(WindowSpec { NamedWindowExpr::WindowSpec(WindowSpec {
window_name: None, window_name: None,
partition_by: vec![Expr::Identifier(Ident { partition_by: vec![Expr::Identifier(Ident {
value: "C11".to_string(), value: "C11".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})], })],
order_by: vec![], order_by: vec![],
window_frame: None, window_frame: None,
@ -5425,6 +5448,7 @@ fn interval_disallow_interval_expr_gt() {
right: Box::new(Expr::Identifier(Ident { right: Box::new(Expr::Identifier(Ident {
value: "x".to_string(), value: "x".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
} }
) )
@ -5465,12 +5489,14 @@ fn parse_interval_and_or_xor() {
let expected_ast = vec![Statement::Query(Box::new(Query { let expected_ast = vec![Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![UnnamedExpr(Expr::Identifier(Ident { projection: vec![UnnamedExpr(Expr::Identifier(Ident {
value: "col".to_string(), value: "col".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}))], }))],
into: None, into: None,
from: vec![TableWithJoins { from: vec![TableWithJoins {
@ -5478,6 +5504,7 @@ fn parse_interval_and_or_xor() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test".to_string(), value: "test".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -5496,12 +5523,14 @@ fn parse_interval_and_or_xor() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "d3_date".to_string(), value: "d3_date".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Gt, op: BinaryOperator::Gt,
right: Box::new(Expr::BinaryOp { right: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "d1_date".to_string(), value: "d1_date".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Plus, op: BinaryOperator::Plus,
right: Box::new(Expr::Interval(Interval { right: Box::new(Expr::Interval(Interval {
@ -5520,12 +5549,14 @@ fn parse_interval_and_or_xor() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "d2_date".to_string(), value: "d2_date".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Gt, op: BinaryOperator::Gt,
right: Box::new(Expr::BinaryOp { right: Box::new(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "d1_date".to_string(), value: "d1_date".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Plus, op: BinaryOperator::Plus,
right: Box::new(Expr::Interval(Interval { right: Box::new(Expr::Interval(Interval {
@ -5617,6 +5648,7 @@ fn parse_at_timezone() {
alias: Ident { alias: Ident {
value: "hour".to_string(), value: "hour".to_string(),
quote_style: Some('"'), quote_style: Some('"'),
span: Span::empty(),
}, },
}, },
only(&select.projection), only(&select.projection),
@ -6637,12 +6669,14 @@ fn parse_recursive_cte() {
name: Ident { name: Ident {
value: "nums".to_string(), value: "nums".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
columns: vec![TableAliasColumnDef::from_name("val")], columns: vec![TableAliasColumnDef::from_name("val")],
}, },
query: Box::new(cte_query), query: Box::new(cte_query),
from: None, from: None,
materialized: None, materialized: None,
closing_paren_token: AttachedToken::empty(),
}; };
assert_eq!(with.cte_tables.first().unwrap(), &expected); assert_eq!(with.cte_tables.first().unwrap(), &expected);
} }
@ -7616,22 +7650,18 @@ fn lateral_function() {
let sql = "SELECT * FROM customer LEFT JOIN LATERAL generate_series(1, customer.id)"; let sql = "SELECT * FROM customer LEFT JOIN LATERAL generate_series(1, customer.id)";
let actual_select_only = verified_only_select(sql); let actual_select_only = verified_only_select(sql);
let expected = Select { let expected = Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())],
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions {
opt_ilike: None,
opt_exclude: None,
opt_except: None,
opt_rename: None,
opt_replace: None,
})],
into: None, into: None,
from: vec![TableWithJoins { from: vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "customer".to_string(), value: "customer".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -8270,10 +8300,12 @@ fn parse_grant() {
Ident { Ident {
value: "shape".into(), value: "shape".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "size".into(), value: "size".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]) ])
}, },
@ -8467,6 +8499,7 @@ fn parse_merge() {
subquery: Box::new(Query { subquery: Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -8515,6 +8548,7 @@ fn parse_merge() {
name: Ident { name: Ident {
value: "stg".to_string(), value: "stg".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
columns: vec![], columns: vec![],
}), }),
@ -8714,7 +8748,8 @@ fn test_lock_table() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "school".to_string(), value: "school".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert!(lock.nonblock.is_none()); assert!(lock.nonblock.is_none());
@ -8728,7 +8763,8 @@ fn test_lock_table() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "school".to_string(), value: "school".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert!(lock.nonblock.is_none()); assert!(lock.nonblock.is_none());
@ -8742,7 +8778,8 @@ fn test_lock_table() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "school".to_string(), value: "school".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert!(lock.nonblock.is_none()); assert!(lock.nonblock.is_none());
@ -8752,7 +8789,8 @@ fn test_lock_table() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "student".to_string(), value: "student".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert!(lock.nonblock.is_none()); assert!(lock.nonblock.is_none());
@ -8769,7 +8807,8 @@ fn test_lock_nonblock() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "school".to_string(), value: "school".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert_eq!(lock.nonblock.unwrap(), NonBlock::SkipLocked); assert_eq!(lock.nonblock.unwrap(), NonBlock::SkipLocked);
@ -8783,7 +8822,8 @@ fn test_lock_nonblock() {
lock.of.unwrap().0, lock.of.unwrap().0,
vec![Ident { vec![Ident {
value: "school".to_string(), value: "school".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}] }]
); );
assert_eq!(lock.nonblock.unwrap(), NonBlock::Nowait); assert_eq!(lock.nonblock.unwrap(), NonBlock::Nowait);
@ -9584,7 +9624,8 @@ fn parse_pivot_table() {
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident {
value: "p".to_string(), value: "p".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
columns: vec![ columns: vec![
TableAliasColumnDef::from_name("c"), TableAliasColumnDef::from_name("c"),
@ -9636,12 +9677,14 @@ fn parse_unpivot_table() {
}), }),
value: Ident { value: Ident {
value: "quantity".to_string(), value: "quantity".to_string(),
quote_style: None quote_style: None,
span: Span::empty()
}, },
name: Ident { name: Ident {
value: "quarter".to_string(), value: "quarter".to_string(),
quote_style: None quote_style: None,
span: Span::empty()
}, },
columns: ["Q1", "Q2", "Q3", "Q4"] columns: ["Q1", "Q2", "Q3", "Q4"]
.into_iter() .into_iter()
@ -9704,12 +9747,14 @@ fn parse_pivot_unpivot_table() {
}), }),
value: Ident { value: Ident {
value: "population".to_string(), value: "population".to_string(),
quote_style: None quote_style: None,
span: Span::empty()
}, },
name: Ident { name: Ident {
value: "year".to_string(), value: "year".to_string(),
quote_style: None quote_style: None,
span: Span::empty()
}, },
columns: ["population_2000", "population_2010"] columns: ["population_2000", "population_2010"]
.into_iter() .into_iter()
@ -9999,10 +10044,12 @@ fn parse_execute_stored_procedure() {
Ident { Ident {
value: "my_schema".to_string(), value: "my_schema".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "my_stored_procedure".to_string(), value: "my_stored_procedure".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
]), ]),
parameters: vec![ parameters: vec![
@ -10098,6 +10145,7 @@ fn parse_unload() {
Statement::Unload { Statement::Unload {
query: Box::new(Query { query: Box::new(Query {
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -10143,12 +10191,14 @@ fn parse_unload() {
}), }),
to: Ident { to: Ident {
value: "s3://...".to_string(), value: "s3://...".to_string(),
quote_style: Some('\'') quote_style: Some('\''),
span: Span::empty(),
}, },
with: vec![SqlOption::KeyValue { with: vec![SqlOption::KeyValue {
key: Ident { key: Ident {
value: "format".to_string(), value: "format".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
value: Expr::Value(Value::SingleQuotedString("AVRO".to_string())) value: Expr::Value(Value::SingleQuotedString("AVRO".to_string()))
}] }]
@ -10275,6 +10325,7 @@ fn parse_map_access_expr() {
#[test] #[test]
fn parse_connect_by() { fn parse_connect_by() {
let expect_query = Select { let expect_query = Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -10363,6 +10414,7 @@ fn parse_connect_by() {
assert_eq!( assert_eq!(
all_dialects_where(|d| d.supports_connect_by()).verified_only_select(connect_by_3), all_dialects_where(|d| d.supports_connect_by()).verified_only_select(connect_by_3),
Select { Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -11206,6 +11258,7 @@ fn test_extract_seconds_ok() {
field: DateTimeField::Custom(Ident { field: DateTimeField::Custom(Ident {
value: "seconds".to_string(), value: "seconds".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}), }),
syntax: ExtractSyntax::From, syntax: ExtractSyntax::From,
expr: Box::new(Expr::Cast { expr: Box::new(Expr::Cast {
@ -11231,6 +11284,7 @@ fn test_extract_seconds_single_quote_ok() {
field: DateTimeField::Custom(Ident { field: DateTimeField::Custom(Ident {
value: "seconds".to_string(), value: "seconds".to_string(),
quote_style: Some('\''), quote_style: Some('\''),
span: Span::empty(),
}), }),
syntax: ExtractSyntax::From, syntax: ExtractSyntax::From,
expr: Box::new(Expr::Cast { expr: Box::new(Expr::Cast {
@ -12130,7 +12184,8 @@ fn test_load_extension() {
assert_eq!( assert_eq!(
Ident { Ident {
value: "filename".to_string(), value: "filename".to_string(),
quote_style: Some('\'') quote_style: Some('\''),
span: Span::empty(),
}, },
extension_name extension_name
); );
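Most of the churn in the test suites is the new mandatory `span` field on `Ident`. Where the other fields are defaults, `Ident::new` keeps expected-AST literals short; a sketch of the equivalence these tests rely on:

```rust
use sqlparser::ast::Ident;
use sqlparser::tokenizer::Span;

// `Ident::new` fills in `quote_style: None` and an empty span, so the two
// forms below are interchangeable in expected-AST literals.
let short = Ident::new("t_test_table");
let long = Ident {
    value: "t_test_table".to_string(),
    quote_style: None,
    span: Span::empty(),
};
assert_eq!(short, long);
```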

View file

@ -18,6 +18,8 @@
#[macro_use] #[macro_use]
mod test_utils; mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
use sqlparser::ast::*; use sqlparser::ast::*;
@ -259,22 +261,18 @@ fn test_select_union_by_name() {
op: SetOperator::Union, op: SetOperator::Union,
set_quantifier: *expected_quantifier, set_quantifier: *expected_quantifier,
left: Box::<SetExpr>::new(SetExpr::Select(Box::new(Select { left: Box::<SetExpr>::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())],
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions {
opt_ilike: None,
opt_exclude: None,
opt_except: None,
opt_rename: None,
opt_replace: None,
})],
into: None, into: None,
from: vec![TableWithJoins { from: vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "capitals".to_string(), value: "capitals".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -301,22 +299,18 @@ fn test_select_union_by_name() {
connect_by: None, connect_by: None,
}))), }))),
right: Box::<SetExpr>::new(SetExpr::Select(Box::new(Select { right: Box::<SetExpr>::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())],
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions {
opt_ilike: None,
opt_exclude: None,
opt_except: None,
opt_rename: None,
opt_replace: None,
})],
into: None, into: None,
from: vec![TableWithJoins { from: vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "weather".to_string(), value: "weather".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -355,12 +349,28 @@ fn test_duckdb_install() {
Statement::Install { Statement::Install {
extension_name: Ident { extension_name: Ident {
value: "tpch".to_string(), value: "tpch".to_string(),
quote_style: None quote_style: None,
span: Span::empty()
} }
} }
); );
} }
#[test]
fn test_duckdb_load_extension() {
let stmt = duckdb().verified_stmt("LOAD my_extension");
assert_eq!(
Statement::Load {
extension_name: Ident {
value: "my_extension".to_string(),
quote_style: None,
span: Span::empty()
}
},
stmt
);
}
#[test] #[test]
fn test_duckdb_struct_literal() { fn test_duckdb_struct_literal() {
//struct literal syntax https://duckdb.org/docs/sql/data_types/struct#creating-structs //struct literal syntax https://duckdb.org/docs/sql/data_types/struct#creating-structs

View file

@ -22,6 +22,8 @@
#[macro_use] #[macro_use]
mod test_utils; mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
use sqlparser::ast::DataType::{Int, Text}; use sqlparser::ast::DataType::{Int, Text};
@ -113,6 +115,7 @@ fn parse_create_procedure() {
settings: None, settings: None,
format_clause: None, format_clause: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -138,14 +141,16 @@ fn parse_create_procedure() {
ProcedureParam { ProcedureParam {
name: Ident { name: Ident {
value: "@foo".into(), value: "@foo".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
data_type: DataType::Int(None) data_type: DataType::Int(None)
}, },
ProcedureParam { ProcedureParam {
name: Ident { name: Ident {
value: "@bar".into(), value: "@bar".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
data_type: DataType::Varchar(Some(CharacterLength::IntegerLength { data_type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 256, length: 256,
@ -155,7 +160,8 @@ fn parse_create_procedure() {
]), ]),
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test".into(), value: "test".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}]) }])
} }
) )
@ -204,15 +210,9 @@ fn parse_mssql_openjson() {
assert_eq!( assert_eq!(
vec![TableWithJoins { vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident::new("t_test_table")]),
value: "t_test_table".into(),
quote_style: None,
},]),
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("A"),
value: "A".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}), }),
args: None, args: None,
@ -224,23 +224,13 @@ fn parse_mssql_openjson() {
}, },
joins: vec![Join { joins: vec![Join {
relation: TableFactor::OpenJsonTable { relation: TableFactor::OpenJsonTable {
json_expr: Expr::CompoundIdentifier(vec![ json_expr: Expr::CompoundIdentifier(
Ident { vec![Ident::new("A"), Ident::new("param"),]
value: "A".into(), ),
quote_style: None,
},
Ident {
value: "param".into(),
quote_style: None,
}
]),
json_path: Some(Value::SingleQuotedString("$.config".into())), json_path: Some(Value::SingleQuotedString("$.config".into())),
columns: vec![ columns: vec![
OpenJsonTableColumn { OpenJsonTableColumn {
name: Ident { name: Ident::new("kind"),
value: "kind".into(),
quote_style: None,
},
r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { r#type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 20, length: 20,
unit: None unit: None
@ -252,6 +242,7 @@ fn parse_mssql_openjson() {
name: Ident { name: Ident {
value: "id_list".into(), value: "id_list".into(),
quote_style: Some('['), quote_style: Some('['),
span: Span::empty(),
}, },
r#type: DataType::Nvarchar(Some(CharacterLength::Max)), r#type: DataType::Nvarchar(Some(CharacterLength::Max)),
path: Some("$.id_list".into()), path: Some("$.id_list".into()),
@ -259,10 +250,7 @@ fn parse_mssql_openjson() {
} }
], ],
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("B"),
value: "B".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}) })
}, },
@ -280,15 +268,9 @@ fn parse_mssql_openjson() {
assert_eq!( assert_eq!(
vec![TableWithJoins { vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident::new("t_test_table"),]),
value: "t_test_table".into(),
quote_style: None,
},]),
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("A"),
value: "A".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}), }),
args: None, args: None,
@ -300,23 +282,13 @@ fn parse_mssql_openjson() {
}, },
joins: vec![Join { joins: vec![Join {
relation: TableFactor::OpenJsonTable { relation: TableFactor::OpenJsonTable {
json_expr: Expr::CompoundIdentifier(vec![ json_expr: Expr::CompoundIdentifier(
Ident { vec![Ident::new("A"), Ident::new("param"),]
value: "A".into(), ),
quote_style: None,
},
Ident {
value: "param".into(),
quote_style: None,
}
]),
json_path: None, json_path: None,
columns: vec![ columns: vec![
OpenJsonTableColumn { OpenJsonTableColumn {
name: Ident { name: Ident::new("kind"),
value: "kind".into(),
quote_style: None,
},
r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { r#type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 20, length: 20,
unit: None unit: None
@ -328,6 +300,7 @@ fn parse_mssql_openjson() {
name: Ident { name: Ident {
value: "id_list".into(), value: "id_list".into(),
quote_style: Some('['), quote_style: Some('['),
span: Span::empty(),
}, },
r#type: DataType::Nvarchar(Some(CharacterLength::Max)), r#type: DataType::Nvarchar(Some(CharacterLength::Max)),
path: Some("$.id_list".into()), path: Some("$.id_list".into()),
@ -335,10 +308,7 @@ fn parse_mssql_openjson() {
} }
], ],
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("B"),
value: "B".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}) })
}, },
@ -356,15 +326,10 @@ fn parse_mssql_openjson() {
assert_eq!( assert_eq!(
vec![TableWithJoins { vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident::new("t_test_table")]),
value: "t_test_table".into(),
quote_style: None,
},]),
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("A"),
value: "A".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}), }),
args: None, args: None,
@ -376,23 +341,13 @@ fn parse_mssql_openjson() {
}, },
joins: vec![Join { joins: vec![Join {
relation: TableFactor::OpenJsonTable { relation: TableFactor::OpenJsonTable {
json_expr: Expr::CompoundIdentifier(vec![ json_expr: Expr::CompoundIdentifier(
Ident { vec![Ident::new("A"), Ident::new("param"),]
value: "A".into(), ),
quote_style: None,
},
Ident {
value: "param".into(),
quote_style: None,
}
]),
json_path: None, json_path: None,
columns: vec![ columns: vec![
OpenJsonTableColumn { OpenJsonTableColumn {
name: Ident { name: Ident::new("kind"),
value: "kind".into(),
quote_style: None,
},
r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { r#type: DataType::Varchar(Some(CharacterLength::IntegerLength {
length: 20, length: 20,
unit: None unit: None
@ -404,6 +359,7 @@ fn parse_mssql_openjson() {
name: Ident { name: Ident {
value: "id_list".into(), value: "id_list".into(),
quote_style: Some('['), quote_style: Some('['),
span: Span::empty(),
}, },
r#type: DataType::Nvarchar(Some(CharacterLength::Max)), r#type: DataType::Nvarchar(Some(CharacterLength::Max)),
path: None, path: None,
@ -411,10 +367,7 @@ fn parse_mssql_openjson() {
} }
], ],
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("B"),
value: "B".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}) })
}, },
@ -432,15 +385,9 @@ fn parse_mssql_openjson() {
assert_eq!( assert_eq!(
vec![TableWithJoins { vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident::new("t_test_table")]),
value: "t_test_table".into(),
quote_style: None,
},]),
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("A"),
value: "A".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}), }),
args: None, args: None,
@ -452,23 +399,13 @@ fn parse_mssql_openjson() {
}, },
joins: vec![Join { joins: vec![Join {
relation: TableFactor::OpenJsonTable { relation: TableFactor::OpenJsonTable {
json_expr: Expr::CompoundIdentifier(vec![ json_expr: Expr::CompoundIdentifier(
Ident { vec![Ident::new("A"), Ident::new("param"),]
value: "A".into(), ),
quote_style: None,
},
Ident {
value: "param".into(),
quote_style: None,
}
]),
json_path: Some(Value::SingleQuotedString("$.config".into())), json_path: Some(Value::SingleQuotedString("$.config".into())),
columns: vec![], columns: vec![],
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("B"),
value: "B".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}) })
}, },
@ -486,15 +423,9 @@ fn parse_mssql_openjson() {
assert_eq!( assert_eq!(
vec![TableWithJoins { vec![TableWithJoins {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident::new("t_test_table")]),
value: "t_test_table".into(),
quote_style: None,
},]),
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("A"),
value: "A".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}), }),
args: None, args: None,
@ -506,23 +437,13 @@ fn parse_mssql_openjson() {
}, },
joins: vec![Join { joins: vec![Join {
relation: TableFactor::OpenJsonTable { relation: TableFactor::OpenJsonTable {
json_expr: Expr::CompoundIdentifier(vec![ json_expr: Expr::CompoundIdentifier(
Ident { vec![Ident::new("A"), Ident::new("param"),]
value: "A".into(), ),
quote_style: None,
},
Ident {
value: "param".into(),
quote_style: None,
}
]),
json_path: None, json_path: None,
columns: vec![], columns: vec![],
alias: Some(TableAlias { alias: Some(TableAlias {
name: Ident { name: Ident::new("B"),
value: "B".into(),
quote_style: None
},
columns: vec![] columns: vec![]
}) })
}, },
@ -607,7 +528,8 @@ fn parse_mssql_create_role() {
authorization_owner, authorization_owner,
Some(ObjectName(vec![Ident { Some(ObjectName(vec![Ident {
value: "helena".into(), value: "helena".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])) }]))
); );
} }
@ -623,12 +545,14 @@ fn parse_alter_role() {
[Statement::AlterRole { [Statement::AlterRole {
name: Ident { name: Ident {
value: "old_name".into(), value: "old_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::RenameRole { operation: AlterRoleOperation::RenameRole {
role_name: Ident { role_name: Ident {
value: "new_name".into(), value: "new_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
}, },
}] }]
@ -640,12 +564,14 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::AddMember { operation: AlterRoleOperation::AddMember {
member_name: Ident { member_name: Ident {
value: "new_member".into(), value: "new_member".into(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
}, },
} }
@ -657,12 +583,14 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::DropMember { operation: AlterRoleOperation::DropMember {
member_name: Ident { member_name: Ident {
value: "old_member".into(), value: "old_member".into(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
}, },
} }
@ -1137,13 +1065,15 @@ fn parse_substring_in_select() {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: Some(Distinct::Distinct), distinct: Some(Distinct::Distinct),
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Substring { projection: vec![SelectItem::UnnamedExpr(Expr::Substring {
expr: Box::new(Expr::Identifier(Ident { expr: Box::new(Expr::Identifier(Ident {
value: "description".to_string(), value: "description".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
})), })),
substring_from: Some(Box::new(Expr::Value(number("0")))), substring_from: Some(Box::new(Expr::Value(number("0")))),
substring_for: Some(Box::new(Expr::Value(number("1")))), substring_for: Some(Box::new(Expr::Value(number("1")))),
@ -1154,7 +1084,8 @@ fn parse_substring_in_select() {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test".to_string(), value: "test".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -1208,7 +1139,8 @@ fn parse_mssql_declare() {
Declare { Declare {
names: vec![Ident { names: vec![Ident {
value: "@foo".to_string(), value: "@foo".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}], }],
data_type: None, data_type: None,
assignment: None, assignment: None,
@ -1222,7 +1154,8 @@ fn parse_mssql_declare() {
Declare { Declare {
names: vec![Ident { names: vec![Ident {
value: "@bar".to_string(), value: "@bar".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}], }],
data_type: Some(Int(None)), data_type: Some(Int(None)),
assignment: None, assignment: None,
@ -1236,7 +1169,8 @@ fn parse_mssql_declare() {
Declare { Declare {
names: vec![Ident { names: vec![Ident {
value: "@baz".to_string(), value: "@baz".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}], }],
data_type: Some(Text), data_type: Some(Text),
assignment: Some(MsSqlAssignment(Box::new(Expr::Value(SingleQuotedString( assignment: Some(MsSqlAssignment(Box::new(Expr::Value(SingleQuotedString(
@ -1260,10 +1194,7 @@ fn parse_mssql_declare() {
vec![ vec![
Statement::Declare { Statement::Declare {
stmts: vec![Declare { stmts: vec![Declare {
names: vec![Ident { names: vec![Ident::new("@bar"),],
value: "@bar".to_string(),
quote_style: None
}],
data_type: Some(Int(None)), data_type: Some(Int(None)),
assignment: None, assignment: None,
declare_type: None, declare_type: None,
@ -1292,6 +1223,7 @@ fn parse_mssql_declare() {
settings: None, settings: None,
format_clause: None, format_clause: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -1364,10 +1296,12 @@ fn parse_create_table_with_valid_options() {
key: Ident { key: Ident {
value: "DISTRIBUTION".to_string(), value: "DISTRIBUTION".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
value: Expr::Identifier(Ident { value: Expr::Identifier(Ident {
value: "ROUND_ROBIN".to_string(), value: "ROUND_ROBIN".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}) })
}, },
SqlOption::Partition { SqlOption::Partition {
@ -1411,6 +1345,7 @@ fn parse_create_table_with_valid_options() {
name: Ident { name: Ident {
value: "column_a".to_string(), value: "column_a".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
asc: Some(true), asc: Some(true),
}, },
@ -1418,6 +1353,7 @@ fn parse_create_table_with_valid_options() {
name: Ident { name: Ident {
value: "column_b".to_string(), value: "column_b".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
asc: Some(false), asc: Some(false),
}, },
@ -1425,6 +1361,7 @@ fn parse_create_table_with_valid_options() {
name: Ident { name: Ident {
value: "column_c".to_string(), value: "column_c".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
asc: None, asc: None,
}, },
@ -1438,6 +1375,7 @@ fn parse_create_table_with_valid_options() {
key: Ident { key: Ident {
value: "DISTRIBUTION".to_string(), value: "DISTRIBUTION".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
value: Expr::Function( value: Expr::Function(
Function { Function {
@ -1446,6 +1384,7 @@ fn parse_create_table_with_valid_options() {
Ident { Ident {
value: "HASH".to_string(), value: "HASH".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
], ],
), ),
@ -1460,6 +1399,7 @@ fn parse_create_table_with_valid_options() {
Ident { Ident {
value: "column_a".to_string(), value: "column_a".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
), ),
), ),
@ -1470,6 +1410,7 @@ fn parse_create_table_with_valid_options() {
Ident { Ident {
value: "column_b".to_string(), value: "column_b".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
), ),
), ),
@ -1504,12 +1445,14 @@ fn parse_create_table_with_valid_options() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "mytable".to_string(), value: "mytable".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
},],), },],),
columns: vec![ columns: vec![
ColumnDef { ColumnDef {
name: Ident { name: Ident {
value: "column_a".to_string(), value: "column_a".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
data_type: Int(None,), data_type: Int(None,),
collation: None, collation: None,
@ -1519,6 +1462,7 @@ fn parse_create_table_with_valid_options() {
name: Ident { name: Ident {
value: "column_b".to_string(), value: "column_b".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
data_type: Int(None,), data_type: Int(None,),
collation: None, collation: None,
@ -1528,6 +1472,7 @@ fn parse_create_table_with_valid_options() {
name: Ident { name: Ident {
value: "column_c".to_string(), value: "column_c".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
data_type: Int(None,), data_type: Int(None,),
collation: None, collation: None,
@ -1669,11 +1614,13 @@ fn parse_create_table_with_identity_column() {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "mytable".to_string(), value: "mytable".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
},],), },],),
columns: vec![ColumnDef { columns: vec![ColumnDef {
name: Ident { name: Ident {
value: "columnA".to_string(), value: "columnA".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
data_type: Int(None,), data_type: Int(None,),
collation: None, collation: None,

View file

@ -19,12 +19,14 @@
//! Test SQL syntax specific to MySQL. The parser based on the generic dialect //! Test SQL syntax specific to MySQL. The parser based on the generic dialect
//! is also tested (on the inputs it can handle). //! is also tested (on the inputs it can handle).
use helpers::attached_token::AttachedToken;
use matches::assert_matches; use matches::assert_matches;
use sqlparser::ast::MysqlInsertPriority::{Delayed, HighPriority, LowPriority}; use sqlparser::ast::MysqlInsertPriority::{Delayed, HighPriority, LowPriority};
use sqlparser::ast::*; use sqlparser::ast::*;
use sqlparser::dialect::{GenericDialect, MySqlDialect}; use sqlparser::dialect::{GenericDialect, MySqlDialect};
use sqlparser::parser::{ParserError, ParserOptions}; use sqlparser::parser::{ParserError, ParserOptions};
use sqlparser::tokenizer::Span;
use sqlparser::tokenizer::Token; use sqlparser::tokenizer::Token;
use test_utils::*; use test_utils::*;
@ -142,16 +144,19 @@ fn parse_flush() {
ObjectName(vec![ ObjectName(vec![
Ident { Ident {
value: "mek".to_string(), value: "mek".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
}, },
Ident { Ident {
value: "table1".to_string(), value: "table1".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
} }
]), ]),
ObjectName(vec![Ident { ObjectName(vec![Ident {
value: "table2".to_string(), value: "table2".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]) }])
] ]
} }
@ -179,16 +184,19 @@ fn parse_flush() {
ObjectName(vec![ ObjectName(vec![
Ident { Ident {
value: "mek".to_string(), value: "mek".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
}, },
Ident { Ident {
value: "table1".to_string(), value: "table1".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
} }
]), ]),
ObjectName(vec![Ident { ObjectName(vec![Ident {
value: "table2".to_string(), value: "table2".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]) }])
] ]
} }
@ -205,16 +213,19 @@ fn parse_flush() {
ObjectName(vec![ ObjectName(vec![
Ident { Ident {
value: "mek".to_string(), value: "mek".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
}, },
Ident { Ident {
value: "table1".to_string(), value: "table1".to_string(),
quote_style: Some('`') quote_style: Some('`'),
span: Span::empty(),
} }
]), ]),
ObjectName(vec![Ident { ObjectName(vec![Ident {
value: "table2".to_string(), value: "table2".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]) }])
] ]
} }
@ -1058,12 +1069,14 @@ fn parse_escaped_quote_identifiers_with_escape() {
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "quoted ` identifier".into(), value: "quoted ` identifier".into(),
quote_style: Some('`'), quote_style: Some('`'),
span: Span::empty(),
}))], }))],
into: None, into: None,
from: vec![], from: vec![],
@ -1109,12 +1122,14 @@ fn parse_escaped_quote_identifiers_with_no_escape() {
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "quoted `` identifier".into(), value: "quoted `` identifier".into(),
quote_style: Some('`'), quote_style: Some('`'),
span: Span::empty(),
}))], }))],
into: None, into: None,
from: vec![], from: vec![],
@ -1153,12 +1168,15 @@ fn parse_escaped_backticks_with_escape() {
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "`quoted identifier`".into(), value: "`quoted identifier`".into(),
quote_style: Some('`'), quote_style: Some('`'),
span: Span::empty(),
}))], }))],
into: None, into: None,
from: vec![], from: vec![],
@ -1201,12 +1219,15 @@ fn parse_escaped_backticks_with_no_escape() {
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "``quoted identifier``".into(), value: "``quoted identifier``".into(),
quote_style: Some('`'), quote_style: Some('`'),
span: Span::empty(),
}))], }))],
into: None, into: None,
from: vec![], from: vec![],
@ -1846,6 +1867,8 @@ fn parse_select_with_numeric_prefix_column_name() {
assert_eq!( assert_eq!(
q.body, q.body,
Box::new(SetExpr::Select(Box::new(Select { Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -1902,6 +1925,8 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() {
assert_eq!( assert_eq!(
q.body, q.body,
Box::new(SetExpr::Select(Box::new(Select { Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -2055,7 +2080,8 @@ fn parse_delete_with_order_by() {
vec![OrderByExpr { vec![OrderByExpr {
expr: Expr::Identifier(Ident { expr: Expr::Identifier(Ident {
value: "id".to_owned(), value: "id".to_owned(),
quote_style: None quote_style: None,
span: Span::empty(),
}), }),
asc: Some(false), asc: Some(false),
nulls_first: None, nulls_first: None,
@ -2136,7 +2162,8 @@ fn parse_alter_table_add_column() {
}, },
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("foo"), value: String::from("foo"),
quote_style: None quote_style: None,
span: Span::empty(),
})), })),
},] },]
); );
@ -2187,6 +2214,7 @@ fn parse_alter_table_add_columns() {
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("foo"), value: String::from("foo"),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
}, },
] ]
@ -2247,6 +2275,7 @@ fn parse_alter_table_change_column() {
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("foo"), value: String::from("foo"),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
}; };
let sql4 = "ALTER TABLE orders CHANGE COLUMN description desc TEXT NOT NULL AFTER foo"; let sql4 = "ALTER TABLE orders CHANGE COLUMN description desc TEXT NOT NULL AFTER foo";
@ -2286,6 +2315,7 @@ fn parse_alter_table_change_column_with_column_position() {
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("total_count"), value: String::from("total_count"),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
}; };
@ -2342,6 +2372,7 @@ fn parse_alter_table_modify_column() {
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("foo"), value: String::from("foo"),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
}; };
let sql4 = "ALTER TABLE orders MODIFY COLUMN description TEXT NOT NULL AFTER foo"; let sql4 = "ALTER TABLE orders MODIFY COLUMN description TEXT NOT NULL AFTER foo";
@ -2379,6 +2410,7 @@ fn parse_alter_table_modify_column_with_column_position() {
column_position: Some(MySQLColumnPosition::After(Ident { column_position: Some(MySQLColumnPosition::After(Ident {
value: String::from("total_count"), value: String::from("total_count"),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
}; };
@ -2397,6 +2429,8 @@ fn parse_alter_table_modify_column_with_column_position() {
#[test] #[test]
fn parse_substring_in_select() { fn parse_substring_in_select() {
use sqlparser::tokenizer::Span;
let sql = "SELECT DISTINCT SUBSTRING(description, 0, 1) FROM test"; let sql = "SELECT DISTINCT SUBSTRING(description, 0, 1) FROM test";
match mysql().one_statement_parses_to( match mysql().one_statement_parses_to(
sql, sql,
@ -2407,13 +2441,15 @@ fn parse_substring_in_select() {
Box::new(Query { Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: Some(Distinct::Distinct), distinct: Some(Distinct::Distinct),
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
projection: vec![SelectItem::UnnamedExpr(Expr::Substring { projection: vec![SelectItem::UnnamedExpr(Expr::Substring {
expr: Box::new(Expr::Identifier(Ident { expr: Box::new(Expr::Identifier(Ident {
value: "description".to_string(), value: "description".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
})), })),
substring_from: Some(Box::new(Expr::Value(number("0")))), substring_from: Some(Box::new(Expr::Value(number("0")))),
substring_for: Some(Box::new(Expr::Value(number("1")))), substring_for: Some(Box::new(Expr::Value(number("1")))),
@ -2424,7 +2460,8 @@ fn parse_substring_in_select() {
relation: TableFactor::Table { relation: TableFactor::Table {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test".to_string(), value: "test".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
alias: None, alias: None,
args: None, args: None,
@ -2730,6 +2767,7 @@ fn parse_hex_string_introducer() {
Statement::Query(Box::new(Query { Statement::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
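The other recurring addition in this file is `select_token: AttachedToken::empty()` on every expected `Select`. A small sanity-check sketch of the behaviour these placeholder tokens and empty spans rely on, inferred from the assertions above (the exact queries and spacing are arbitrary): statements that differ only in keyword casing and whitespace, and therefore in token locations, should still compare equal.

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let dialect = GenericDialect {};

    // Same query with different casing and spacing: the source locations of
    // every token differ, but the parsed ASTs are expected to compare equal
    // because attached tokens and identifier spans do not affect equality.
    let a = Parser::parse_sql(&dialect, "SELECT description FROM test").unwrap();
    let b = Parser::parse_sql(&dialect, "select   description   from   test").unwrap();
    assert_eq!(a, b);
}
```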

View file

@ -21,6 +21,8 @@
#[macro_use] #[macro_use]
mod test_utils; mod test_utils;
use helpers::attached_token::AttachedToken;
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
use sqlparser::ast::*; use sqlparser::ast::*;
@ -1163,6 +1165,7 @@ fn parse_copy_to() {
source: CopySource::Query(Box::new(Query { source: CopySource::Query(Box::new(Query {
with: None, with: None,
body: Box::new(SetExpr::Select(Box::new(Select { body: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -1172,6 +1175,7 @@ fn parse_copy_to() {
alias: Ident { alias: Ident {
value: "a".into(), value: "a".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
}, },
SelectItem::ExprWithAlias { SelectItem::ExprWithAlias {
@ -1179,6 +1183,7 @@ fn parse_copy_to() {
alias: Ident { alias: Ident {
value: "b".into(), value: "b".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
} }
], ],
@ -1318,7 +1323,8 @@ fn parse_set() {
variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])),
value: vec![Expr::Identifier(Ident { value: vec![Expr::Identifier(Ident {
value: "b".into(), value: "b".into(),
quote_style: None quote_style: None,
span: Span::empty(),
})], })],
} }
); );
@ -1380,7 +1386,8 @@ fn parse_set() {
])), ])),
value: vec![Expr::Identifier(Ident { value: vec![Expr::Identifier(Ident {
value: "b".into(), value: "b".into(),
quote_style: None quote_style: None,
span: Span::empty(),
})], })],
} }
); );
@ -1452,6 +1459,7 @@ fn parse_set_role() {
role_name: Some(Ident { role_name: Some(Ident {
value: "rolename".to_string(), value: "rolename".to_string(),
quote_style: Some('\"'), quote_style: Some('\"'),
span: Span::empty(),
}), }),
} }
); );
@ -1466,6 +1474,7 @@ fn parse_set_role() {
role_name: Some(Ident { role_name: Some(Ident {
value: "rolename".to_string(), value: "rolename".to_string(),
quote_style: Some('\''), quote_style: Some('\''),
span: Span::empty(),
}), }),
} }
); );
@ -1765,7 +1774,8 @@ fn parse_pg_on_conflict() {
selection: Some(Expr::BinaryOp { selection: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "dsize".to_string(), value: "dsize".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Gt, op: BinaryOperator::Gt,
right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) right: Box::new(Expr::Value(Value::Placeholder("$2".to_string())))
@ -1802,7 +1812,8 @@ fn parse_pg_on_conflict() {
selection: Some(Expr::BinaryOp { selection: Some(Expr::BinaryOp {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "dsize".to_string(), value: "dsize".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Gt, op: BinaryOperator::Gt,
right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) right: Box::new(Expr::Value(Value::Placeholder("$2".to_string())))
@ -2105,14 +2116,16 @@ fn parse_array_index_expr() {
subscript: Box::new(Subscript::Index { subscript: Box::new(Subscript::Index {
index: Expr::Identifier(Ident { index: Expr::Identifier(Ident {
value: "baz".to_string(), value: "baz".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}) })
}) })
}), }),
subscript: Box::new(Subscript::Index { subscript: Box::new(Subscript::Index {
index: Expr::Identifier(Ident { index: Expr::Identifier(Ident {
value: "fooz".to_string(), value: "fooz".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}) })
}) })
}, },
@ -2504,6 +2517,7 @@ fn parse_array_subquery_expr() {
op: SetOperator::Union, op: SetOperator::Union,
set_quantifier: SetQuantifier::None, set_quantifier: SetQuantifier::None,
left: Box::new(SetExpr::Select(Box::new(Select { left: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -2525,6 +2539,7 @@ fn parse_array_subquery_expr() {
connect_by: None, connect_by: None,
}))), }))),
right: Box::new(SetExpr::Select(Box::new(Select { right: Box::new(SetExpr::Select(Box::new(Select {
select_token: AttachedToken::empty(),
distinct: None, distinct: None,
top: None, top: None,
top_before_distinct: false, top_before_distinct: false,
@ -3123,6 +3138,7 @@ fn parse_custom_operator() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "relname".into(), value: "relname".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::PGCustomBinaryOperator(vec![ op: BinaryOperator::PGCustomBinaryOperator(vec![
"database".into(), "database".into(),
@ -3142,6 +3158,7 @@ fn parse_custom_operator() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "relname".into(), value: "relname".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::PGCustomBinaryOperator(vec!["pg_catalog".into(), "~".into()]), op: BinaryOperator::PGCustomBinaryOperator(vec!["pg_catalog".into(), "~".into()]),
right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into())))
@ -3157,6 +3174,7 @@ fn parse_custom_operator() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "relname".into(), value: "relname".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::PGCustomBinaryOperator(vec!["~".into()]), op: BinaryOperator::PGCustomBinaryOperator(vec!["~".into()]),
right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into())))
@ -3307,12 +3325,14 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "old_name".into(), value: "old_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::RenameRole { operation: AlterRoleOperation::RenameRole {
role_name: Ident { role_name: Ident {
value: "new_name".into(), value: "new_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
}, },
} }
@ -3324,7 +3344,8 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::WithOptions { operation: AlterRoleOperation::WithOptions {
options: vec![ options: vec![
@ -3353,7 +3374,8 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::WithOptions { operation: AlterRoleOperation::WithOptions {
options: vec![ options: vec![
@ -3376,12 +3398,14 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Set { operation: AlterRoleOperation::Set {
config_name: ObjectName(vec![Ident { config_name: ObjectName(vec![Ident {
value: "maintenance_work_mem".into(), value: "maintenance_work_mem".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
config_value: SetConfigValue::FromCurrent, config_value: SetConfigValue::FromCurrent,
in_database: None in_database: None
@ -3395,17 +3419,20 @@ fn parse_alter_role() {
[Statement::AlterRole { [Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Set { operation: AlterRoleOperation::Set {
config_name: ObjectName(vec![Ident { config_name: ObjectName(vec![Ident {
value: "maintenance_work_mem".into(), value: "maintenance_work_mem".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
config_value: SetConfigValue::Value(Expr::Value(number("100000"))), config_value: SetConfigValue::Value(Expr::Value(number("100000"))),
in_database: Some(ObjectName(vec![Ident { in_database: Some(ObjectName(vec![Ident {
value: "database_name".into(), value: "database_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])) }]))
}, },
}] }]
@ -3417,17 +3444,20 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Set { operation: AlterRoleOperation::Set {
config_name: ObjectName(vec![Ident { config_name: ObjectName(vec![Ident {
value: "maintenance_work_mem".into(), value: "maintenance_work_mem".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
config_value: SetConfigValue::Value(Expr::Value(number("100000"))), config_value: SetConfigValue::Value(Expr::Value(number("100000"))),
in_database: Some(ObjectName(vec![Ident { in_database: Some(ObjectName(vec![Ident {
value: "database_name".into(), value: "database_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])) }]))
}, },
} }
@ -3439,17 +3469,20 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Set { operation: AlterRoleOperation::Set {
config_name: ObjectName(vec![Ident { config_name: ObjectName(vec![Ident {
value: "maintenance_work_mem".into(), value: "maintenance_work_mem".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
config_value: SetConfigValue::Default, config_value: SetConfigValue::Default,
in_database: Some(ObjectName(vec![Ident { in_database: Some(ObjectName(vec![Ident {
value: "database_name".into(), value: "database_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])) }]))
}, },
} }
@ -3461,7 +3494,8 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Reset { operation: AlterRoleOperation::Reset {
config_name: ResetConfig::ALL, config_name: ResetConfig::ALL,
@ -3476,16 +3510,19 @@ fn parse_alter_role() {
Statement::AlterRole { Statement::AlterRole {
name: Ident { name: Ident {
value: "role_name".into(), value: "role_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
operation: AlterRoleOperation::Reset { operation: AlterRoleOperation::Reset {
config_name: ResetConfig::ConfigName(ObjectName(vec![Ident { config_name: ResetConfig::ConfigName(ObjectName(vec![Ident {
value: "maintenance_work_mem".into(), value: "maintenance_work_mem".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])), }])),
in_database: Some(ObjectName(vec![Ident { in_database: Some(ObjectName(vec![Ident {
value: "database_name".into(), value: "database_name".into(),
quote_style: None quote_style: None,
span: Span::empty(),
}])) }]))
}, },
} }
@ -3630,7 +3667,8 @@ fn parse_drop_function() {
func_desc: vec![FunctionDesc { func_desc: vec![FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_func".to_string(), value: "test_func".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: None args: None
}], }],
@ -3646,7 +3684,8 @@ fn parse_drop_function() {
func_desc: vec![FunctionDesc { func_desc: vec![FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_func".to_string(), value: "test_func".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Integer(None)), OperateFunctionArg::with_name("a", DataType::Integer(None)),
@ -3671,7 +3710,8 @@ fn parse_drop_function() {
FunctionDesc { FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_func1".to_string(), value: "test_func1".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Integer(None)), OperateFunctionArg::with_name("a", DataType::Integer(None)),
@ -3689,7 +3729,8 @@ fn parse_drop_function() {
FunctionDesc { FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_func2".to_string(), value: "test_func2".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Varchar(None)), OperateFunctionArg::with_name("a", DataType::Varchar(None)),
@ -3720,7 +3761,8 @@ fn parse_drop_procedure() {
proc_desc: vec![FunctionDesc { proc_desc: vec![FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_proc".to_string(), value: "test_proc".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: None args: None
}], }],
@ -3736,7 +3778,8 @@ fn parse_drop_procedure() {
proc_desc: vec![FunctionDesc { proc_desc: vec![FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_proc".to_string(), value: "test_proc".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Integer(None)), OperateFunctionArg::with_name("a", DataType::Integer(None)),
@ -3761,7 +3804,8 @@ fn parse_drop_procedure() {
FunctionDesc { FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_proc1".to_string(), value: "test_proc1".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Integer(None)), OperateFunctionArg::with_name("a", DataType::Integer(None)),
@ -3779,7 +3823,8 @@ fn parse_drop_procedure() {
FunctionDesc { FunctionDesc {
name: ObjectName(vec![Ident { name: ObjectName(vec![Ident {
value: "test_proc2".to_string(), value: "test_proc2".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
args: Some(vec![ args: Some(vec![
OperateFunctionArg::with_name("a", DataType::Varchar(None)), OperateFunctionArg::with_name("a", DataType::Varchar(None)),
@ -3860,6 +3905,7 @@ fn parse_dollar_quoted_string() {
alias: Ident { alias: Ident {
value: "col_name".into(), value: "col_name".into(),
quote_style: None, quote_style: None,
span: Span::empty(),
}, },
} }
); );
@ -4204,20 +4250,24 @@ fn test_simple_postgres_insert_with_alias() {
into: true, into: true,
table_name: ObjectName(vec![Ident { table_name: ObjectName(vec![Ident {
value: "test_tables".to_string(), value: "test_tables".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
table_alias: Some(Ident { table_alias: Some(Ident {
value: "test_table".to_string(), value: "test_table".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}), }),
columns: vec![ columns: vec![
Ident { Ident {
value: "id".to_string(), value: "id".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "a".to_string(), value: "a".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
], ],
overwrite: false, overwrite: false,
@ -4267,20 +4317,24 @@ fn test_simple_postgres_insert_with_alias() {
into: true, into: true,
table_name: ObjectName(vec![Ident { table_name: ObjectName(vec![Ident {
value: "test_tables".to_string(), value: "test_tables".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
table_alias: Some(Ident { table_alias: Some(Ident {
value: "test_table".to_string(), value: "test_table".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}), }),
columns: vec![ columns: vec![
Ident { Ident {
value: "id".to_string(), value: "id".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "a".to_string(), value: "a".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
], ],
overwrite: false, overwrite: false,
@ -4332,20 +4386,24 @@ fn test_simple_insert_with_quoted_alias() {
into: true, into: true,
table_name: ObjectName(vec![Ident { table_name: ObjectName(vec![Ident {
value: "test_tables".to_string(), value: "test_tables".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}]), }]),
table_alias: Some(Ident { table_alias: Some(Ident {
value: "Test_Table".to_string(), value: "Test_Table".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}), }),
columns: vec![ columns: vec![
Ident { Ident {
value: "id".to_string(), value: "id".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
}, },
Ident { Ident {
value: "a".to_string(), value: "a".to_string(),
quote_style: None quote_style: None,
span: Span::empty(),
} }
], ],
overwrite: false, overwrite: false,
@ -5017,6 +5075,7 @@ fn check_arrow_precedence(sql: &str, arrow_operator: BinaryOperator) {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "foo".to_string(), value: "foo".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: arrow_operator, op: arrow_operator,
right: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))), right: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))),
@ -5047,6 +5106,7 @@ fn arrow_cast_precedence() {
left: Box::new(Expr::Identifier(Ident { left: Box::new(Expr::Identifier(Ident {
value: "foo".to_string(), value: "foo".to_string(),
quote_style: None, quote_style: None,
span: Span::empty(),
})), })),
op: BinaryOperator::Arrow, op: BinaryOperator::Arrow,
right: Box::new(Expr::Cast { right: Box::new(Expr::Cast {

View file

@ -18,6 +18,7 @@
#[macro_use] #[macro_use]
mod test_utils; mod test_utils;
use sqlparser::tokenizer::Span;
use test_utils::*; use test_utils::*;
use sqlparser::ast::*; use sqlparser::ast::*;
@ -31,7 +32,8 @@ fn test_square_brackets_over_db_schema_table_name() {
select.projection[0], select.projection[0],
SelectItem::UnnamedExpr(Expr::Identifier(Ident { SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "col1".to_string(), value: "col1".to_string(),
quote_style: Some('[') quote_style: Some('['),
span: Span::empty(),
})), })),
); );
assert_eq!( assert_eq!(
@ -41,11 +43,13 @@ fn test_square_brackets_over_db_schema_table_name() {
name: ObjectName(vec![ name: ObjectName(vec![
Ident { Ident {
value: "test_schema".to_string(), value: "test_schema".to_string(),
quote_style: Some('[') quote_style: Some('['),
span: Span::empty(),
}, },
Ident { Ident {
value: "test_table".to_string(), value: "test_table".to_string(),
quote_style: Some('[') quote_style: Some('['),
span: Span::empty(),
} }
]), ]),
alias: None, alias: None,
@ -79,7 +83,8 @@ fn test_double_quotes_over_db_schema_table_name() {
select.projection[0], select.projection[0],
SelectItem::UnnamedExpr(Expr::Identifier(Ident { SelectItem::UnnamedExpr(Expr::Identifier(Ident {
value: "col1".to_string(), value: "col1".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
})), })),
); );
assert_eq!( assert_eq!(
@ -89,11 +94,13 @@ fn test_double_quotes_over_db_schema_table_name() {
name: ObjectName(vec![ name: ObjectName(vec![
Ident { Ident {
value: "test_schema".to_string(), value: "test_schema".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
}, },
Ident { Ident {
value: "test_table".to_string(), value: "test_table".to_string(),
quote_style: Some('"') quote_style: Some('"'),
span: Span::empty(),
} }
]), ]),
alias: None, alias: None,

View file

@ -2762,7 +2762,9 @@ fn parse_view_column_descriptions() {
#[test] #[test]
fn test_parentheses_overflow() { fn test_parentheses_overflow() {
let max_nesting_level: usize = 30;
// TODO: increase / improve after we fix the recursion limit
// for real (see https://github.com/apache/datafusion-sqlparser-rs/issues/984)
let max_nesting_level: usize = 25;
// Verify the recursion check is not too wasteful... (num of parentheses - 2 is acceptable) // Verify the recursion check is not too wasteful... (num of parentheses - 2 is acceptable)
let slack = 2; let slack = 2;