mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-08-22 23:14:07 +00:00)
Take slice as input for parse_keywords (#199)
This commit is contained in:
parent 6cdd4a146d
commit f4fbd9b6b3
1 changed file with 31 additions and 31 deletions
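The change is mechanical: every call site that previously built a `Vec<Keyword>` just to pass it to `parse_keywords` now passes a borrowed slice literal, and the method itself accepts `&[Keyword]`. The following is a minimal, self-contained sketch of that pattern, not code from this repository: the `Keyword` enum, `Parser` struct, and token stream are simplified stand-ins, though the `parse_keywords` body mirrors the slice-based version introduced here.

// Minimal sketch of the slice-based parse_keywords pattern
// (simplified stand-ins, not the crate's actual types).
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Keyword {
    ORDER,
    BY,
    GROUP,
}

struct Parser {
    tokens: Vec<Keyword>, // pretend the token stream is just keywords
    index: usize,
}

impl Parser {
    /// Consume the next token if it is the expected keyword.
    fn parse_keyword(&mut self, expected: Keyword) -> bool {
        if self.tokens.get(self.index) == Some(&expected) {
            self.index += 1;
            true
        } else {
            false
        }
    }

    /// Consume a whole keyword sequence, or consume nothing at all.
    /// Taking `&[Keyword]` instead of `Vec<Keyword>` lets call sites write
    /// `parse_keywords(&[Keyword::ORDER, Keyword::BY])` without allocating
    /// a Vec for every call.
    fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
        let index = self.index;
        for &keyword in keywords {
            if !self.parse_keyword(keyword) {
                // reset index and return immediately
                self.index = index;
                return false;
            }
        }
        true
    }
}

fn main() {
    let mut parser = Parser {
        tokens: vec![Keyword::ORDER, Keyword::BY],
        index: 0,
    };
    // New call style: a borrowed slice literal, no heap allocation.
    assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    // A failed partial match must leave the parser position untouched.
    assert!(!parser.parse_keywords(&[Keyword::GROUP, Keyword::BY]));
    assert_eq!(parser.index, 2);
}

A caller that already owns a `Vec<Keyword>` can still pass it as `&v`, since `&Vec<Keyword>` deref-coerces to `&[Keyword]`, so the slice signature is strictly more flexible than the old one.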
@@ -312,13 +312,13 @@ impl Parser {
         let over = if self.parse_keyword(Keyword::OVER) {
             // TBD: support window names (`OVER mywin`) in place of inline specification
             self.expect_token(&Token::LParen)?;
-            let partition_by = if self.parse_keywords(vec![Keyword::PARTITION, Keyword::BY]) {
+            let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
                 // a list of possibly-qualified column names
                 self.parse_comma_separated(Parser::parse_expr)?
             } else {
                 vec![]
             };
-            let order_by = if self.parse_keywords(vec![Keyword::ORDER, Keyword::BY]) {
+            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
                 self.parse_comma_separated(Parser::parse_order_by_expr)?
             } else {
                 vec![]
@@ -379,7 +379,7 @@ impl Parser {
 
     /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
     pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
-        if self.parse_keywords(vec![Keyword::CURRENT, Keyword::ROW]) {
+        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
             Ok(WindowFrameBound::CurrentRow)
         } else {
             let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
@@ -472,7 +472,7 @@ impl Parser {
         } else {
             None
         };
-        let on_overflow = if self.parse_keywords(vec![Keyword::ON, Keyword::OVERFLOW]) {
+        let on_overflow = if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
             if self.parse_keyword(Keyword::ERROR) {
                 Some(ListAggOnOverflow::Error)
             } else {
@@ -503,7 +503,7 @@ impl Parser {
         self.expect_token(&Token::RParen)?;
         // Once again ANSI SQL requires WITHIN GROUP, but Redshift does not. Again we choose the
         // more general implementation.
-        let within_group = if self.parse_keywords(vec![Keyword::WITHIN, Keyword::GROUP]) {
+        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
             self.expect_token(&Token::LParen)?;
             self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
             let order_by_expr = self.parse_comma_separated(Parser::parse_order_by_expr)?;
@@ -665,7 +665,7 @@ impl Parser {
             Keyword::IS => {
                 if self.parse_keyword(Keyword::NULL) {
                     Ok(Expr::IsNull(Box::new(expr)))
-                } else if self.parse_keywords(vec![Keyword::NOT, Keyword::NULL]) {
+                } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
                     Ok(Expr::IsNotNull(Box::new(expr)))
                 } else {
                     self.expected("NULL or NOT NULL after IS", self.peek_token())
@@ -849,9 +849,9 @@ impl Parser {
 
     /// Look for an expected sequence of keywords and consume them if they exist
     #[must_use]
-    pub fn parse_keywords(&mut self, keywords: Vec<Keyword>) -> bool {
+    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
         let index = self.index;
-        for keyword in keywords {
+        for &keyword in keywords {
             if !self.parse_keyword(keyword) {
                 //println!("parse_keywords aborting .. did not find {}", keyword);
                 // reset index and return immediately
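Worth noting in the hunk above: with a borrowed `&[Keyword]`, iteration yields `&Keyword`, so the loop pattern becomes `for &keyword in keywords`. That pattern copies each keyword out of the slice (which compiles because `Keyword` is a `Copy` enum), leaving the existing by-value call `self.parse_keyword(keyword)` unchanged.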
@@ -979,7 +979,7 @@ impl Parser {
             self.parse_create_table()
         } else if self.parse_keyword(Keyword::INDEX) {
             self.parse_create_index(false)
-        } else if self.parse_keywords(vec![Keyword::UNIQUE, Keyword::INDEX]) {
+        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
             self.parse_create_index(true)
         } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
             self.prev_token();
@@ -1057,7 +1057,7 @@ impl Parser {
         };
         // Many dialects support the non standard `IF EXISTS` clause and allow
         // specifying multiple objects to delete in a single statement
-        let if_exists = self.parse_keywords(vec![Keyword::IF, Keyword::EXISTS]);
+        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
         let names = self.parse_comma_separated(Parser::parse_object_name)?;
         let cascade = self.parse_keyword(Keyword::CASCADE);
         let restrict = self.parse_keyword(Keyword::RESTRICT);
@@ -1073,7 +1073,7 @@ impl Parser {
     }
 
     pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
-        let if_not_exists = self.parse_keywords(vec![Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
+        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let index_name = self.parse_object_name()?;
         self.expect_keyword(Keyword::ON)?;
         let table_name = self.parse_object_name()?;
@@ -1088,7 +1088,7 @@ impl Parser {
     }
 
     pub fn parse_create_table(&mut self) -> Result<Statement, ParserError> {
-        let if_not_exists = self.parse_keywords(vec![Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
+        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let table_name = self.parse_object_name()?;
         // parse optional column list (schema)
         let (columns, constraints) = self.parse_columns()?;
@@ -1160,13 +1160,13 @@ impl Parser {
             None
         };
 
-        let option = if self.parse_keywords(vec![Keyword::NOT, Keyword::NULL]) {
+        let option = if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
             ColumnOption::NotNull
         } else if self.parse_keyword(Keyword::NULL) {
             ColumnOption::Null
         } else if self.parse_keyword(Keyword::DEFAULT) {
             ColumnOption::Default(self.parse_expr()?)
-        } else if self.parse_keywords(vec![Keyword::PRIMARY, Keyword::KEY]) {
+        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
             ColumnOption::Unique { is_primary: true }
         } else if self.parse_keyword(Keyword::UNIQUE) {
             ColumnOption::Unique { is_primary: false }
@@ -1178,10 +1178,10 @@ impl Parser {
         let mut on_delete = None;
         let mut on_update = None;
         loop {
-            if on_delete.is_none() && self.parse_keywords(vec![Keyword::ON, Keyword::DELETE]) {
+            if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
                 on_delete = Some(self.parse_referential_action()?);
             } else if on_update.is_none()
-                && self.parse_keywords(vec![Keyword::ON, Keyword::UPDATE])
+                && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
             {
                 on_update = Some(self.parse_referential_action()?);
             } else {
@@ -1211,11 +1211,11 @@ impl Parser {
             Ok(ReferentialAction::Restrict)
         } else if self.parse_keyword(Keyword::CASCADE) {
             Ok(ReferentialAction::Cascade)
-        } else if self.parse_keywords(vec![Keyword::SET, Keyword::NULL]) {
+        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
             Ok(ReferentialAction::SetNull)
-        } else if self.parse_keywords(vec![Keyword::NO, Keyword::ACTION]) {
+        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
             Ok(ReferentialAction::NoAction)
-        } else if self.parse_keywords(vec![Keyword::SET, Keyword::DEFAULT]) {
+        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
             Ok(ReferentialAction::SetDefault)
         } else {
             self.expected(
@@ -1633,7 +1633,7 @@ impl Parser {
 
         let body = self.parse_query_body(0)?;
 
-        let order_by = if self.parse_keywords(vec![Keyword::ORDER, Keyword::BY]) {
+        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
             self.parse_comma_separated(Parser::parse_order_by_expr)?
         } else {
             vec![]
@@ -1772,7 +1772,7 @@ impl Parser {
             None
         };
 
-        let group_by = if self.parse_keywords(vec![Keyword::GROUP, Keyword::BY]) {
+        let group_by = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
             self.parse_comma_separated(Parser::parse_expr)?
         } else {
             vec![]
@@ -2124,9 +2124,9 @@ impl Parser {
             None
         };
 
-        let nulls_first = if self.parse_keywords(vec![Keyword::NULLS, Keyword::FIRST]) {
+        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
             Some(true)
-        } else if self.parse_keywords(vec![Keyword::NULLS, Keyword::LAST]) {
+        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
             Some(false)
         } else {
             None
@@ -2152,7 +2152,7 @@ impl Parser {
 
         let percent = self.parse_keyword(Keyword::PERCENT);
 
-        let with_ties = self.parse_keywords(vec![Keyword::WITH, Keyword::TIES]);
+        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
 
         Ok(Top {
             with_ties,
@@ -2199,7 +2199,7 @@ impl Parser {
         };
         let with_ties = if self.parse_keyword(Keyword::ONLY) {
             false
-        } else if self.parse_keywords(vec![Keyword::WITH, Keyword::TIES]) {
+        } else if self.parse_keywords(&[Keyword::WITH, Keyword::TIES]) {
             true
         } else {
             return self.expected("one of ONLY or WITH TIES", self.peek_token());
@@ -2239,12 +2239,12 @@ impl Parser {
         let mut modes = vec![];
         let mut required = false;
         loop {
-            let mode = if self.parse_keywords(vec![Keyword::ISOLATION, Keyword::LEVEL]) {
-                let iso_level = if self.parse_keywords(vec![Keyword::READ, Keyword::UNCOMMITTED]) {
+            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
+                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
                     TransactionIsolationLevel::ReadUncommitted
-                } else if self.parse_keywords(vec![Keyword::READ, Keyword::COMMITTED]) {
+                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
                     TransactionIsolationLevel::ReadCommitted
-                } else if self.parse_keywords(vec![Keyword::REPEATABLE, Keyword::READ]) {
+                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
                     TransactionIsolationLevel::RepeatableRead
                 } else if self.parse_keyword(Keyword::SERIALIZABLE) {
                     TransactionIsolationLevel::Serializable
@@ -2252,9 +2252,9 @@ impl Parser {
                     self.expected("isolation level", self.peek_token())?
                 };
                 TransactionMode::IsolationLevel(iso_level)
-            } else if self.parse_keywords(vec![Keyword::READ, Keyword::ONLY]) {
+            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
                 TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
-            } else if self.parse_keywords(vec![Keyword::READ, Keyword::WRITE]) {
+            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
                 TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
            } else if required {
                self.expected("transaction mode", self.peek_token())?