Mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-10-09 21:42:05 +00:00)
Support parsing empty map literal syntax for DuckDB and Generic (#1361)
parent d49acc67b1
commit 8f8c96f87f
2 changed files with 19 additions and 25 deletions
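
For context, a minimal sketch (not part of the commit) of the behaviour this change enables, assuming a `sqlparser` build that includes it: with the DuckDB (or Generic) dialect, an empty map literal now parses instead of returning an error.

```rust
// Sketch only: exercises the crate's public API after this change.
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    // Before this change, `MAP {}` failed because the map literal parser
    // required at least one `key: value` entry between the braces.
    let statements = Parser::parse_sql(&DuckDbDialect {}, "SELECT MAP {}")
        .expect("empty map literal should parse");
    println!("{statements:?}");
}
```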
@@ -1849,17 +1849,9 @@ impl<'a> Parser<'a> {
     /// Parses an array expression `[ex1, ex2, ..]`
     /// if `named` is `true`, came from an expression like `ARRAY[ex1, ex2]`
     pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
-        if self.peek_token().token == Token::RBracket {
-            let _ = self.next_token(); // consume ]
-            Ok(Expr::Array(Array {
-                elem: vec![],
-                named,
-            }))
-        } else {
-            let exprs = self.parse_comma_separated(Parser::parse_expr)?;
-            self.expect_token(&Token::RBracket)?;
-            Ok(Expr::Array(Array { elem: exprs, named }))
-        }
+        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
+        self.expect_token(&Token::RBracket)?;
+        Ok(Expr::Array(Array { elem: exprs, named }))
     }
 
     pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
@@ -2352,11 +2344,8 @@ impl<'a> Parser<'a> {
     /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
     fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
         self.expect_token(&Token::LBrace)?;
-
-        let fields = self.parse_comma_separated(Self::parse_duckdb_map_field)?;
-
+        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
         self.expect_token(&Token::RBrace)?;
-
         Ok(Expr::Map(Map { entries: fields }))
     }
 
@@ -2937,7 +2926,7 @@ impl<'a> Parser<'a> {
             Expr::InList {
                 expr: Box::new(expr),
                 list: if self.dialect.supports_in_empty_list() {
-                    self.parse_comma_separated0(Parser::parse_expr)?
+                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
                 } else {
                     self.parse_comma_separated(Parser::parse_expr)?
                 },
@@ -3479,18 +3468,20 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a comma-separated list of 0+ items accepted by `F`
-    pub fn parse_comma_separated0<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
+    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
+    pub fn parse_comma_separated0<T, F>(
+        &mut self,
+        f: F,
+        end_token: Token,
+    ) -> Result<Vec<T>, ParserError>
     where
         F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
     {
-        // ()
-        if matches!(self.peek_token().token, Token::RParen) {
+        if self.peek_token().token == end_token {
             return Ok(vec![]);
         }
-        // (,)
-        if self.options.trailing_commas
-            && matches!(self.peek_tokens(), [Token::Comma, Token::RParen])
-        {
+
+        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
             let _ = self.consume_token(&Token::Comma);
             return Ok(vec![]);
         }
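
The hunk above is the core of the change: `parse_comma_separated0` now takes the expected end token instead of hard-coding `Token::RParen`, so bracket- and brace-delimited lists can be empty as well. A small sketch of how the generalized helper behaves, assuming the post-patch public API:

```rust
// Sketch only: an empty Vec is returned when the parser is already looking
// at `end_token`; otherwise items are parsed as a normal comma-separated list.
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};
use sqlparser::tokenizer::Token;

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};

    // Empty list: the next token is already the end token, so no items are parsed.
    let mut parser = Parser::new(&dialect).try_with_sql(")")?;
    let exprs = parser.parse_comma_separated0(Parser::parse_expr, Token::RParen)?;
    assert!(exprs.is_empty());

    // Non-empty list: expressions are parsed up to the end token, which is left unconsumed.
    let mut parser = Parser::new(&dialect).try_with_sql("1, 2, 3)")?;
    let exprs = parser.parse_comma_separated0(Parser::parse_expr, Token::RParen)?;
    assert_eq!(exprs.len(), 3);

    Ok(())
}
```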
@@ -4059,7 +4050,7 @@ impl<'a> Parser<'a> {
             })
         };
         self.expect_token(&Token::LParen)?;
-        let args = self.parse_comma_separated0(parse_function_param)?;
+        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
         self.expect_token(&Token::RParen)?;
 
         let return_type = if self.parse_keyword(Keyword::RETURNS) {
@@ -10713,7 +10704,8 @@ impl<'a> Parser<'a> {
         }
 
         if self.consume_token(&Token::LParen) {
-            let interpolations = self.parse_comma_separated0(|p| p.parse_interpolation())?;
+            let interpolations =
+                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
             self.expect_token(&Token::RParen)?;
             // INTERPOLATE () and INTERPOLATE ( ... ) variants
             return Ok(Some(Interpolate {
@@ -10282,6 +10282,8 @@ fn test_map_syntax() {
             }),
         },
     );
+
+    check("MAP {}", Expr::Map(Map { entries: vec![] }));
 }
 
 #[test]