Mirror of https://github.com/apache/datafusion-sqlparser-rs.git, synced 2025-09-27 07:59:11 +00:00
Reduce cloning of tokens
- Avoid cloning whitespace tokens in `peek_nth_token()` by using a `&Token` from `tokens.get()` instead of a cloned `Token` from `token_at()`.
- Similarly avoid cloning in `next_token_no_skip`, and clone the non-whitespace tokens in `next_token` instead.
- Remove `token_at`, which was only used in `peek_token` and `peek_nth_token`.
- Fold `prev_token_no_skip()` into `prev_token()`, and make `prev_token` return nothing, as the return value isn't used anyway.
This commit is contained in:
parent ebb82b8c8f
commit 1227fddd48

1 changed file with 16 additions and 41 deletions
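To illustrate the borrowing-vs-cloning pattern the commit message describes, here is a minimal, self-contained sketch. The `Tok` and `Stream` types and the method names are invented for this example and are not the crate's actual API: peeking hands out a `&Tok` borrowed straight from the buffer, and a token is cloned only at the moment a caller consumes it.

```rust
// A simplified stand-in for the parser's token buffer; all names here are
// invented for the example and do not mirror the crate's real types.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Whitespace(String),
    Word(String),
}

struct Stream {
    toks: Vec<Tok>,
    index: usize,
}

impl Stream {
    /// Peek at the next non-whitespace token without consuming it:
    /// only a borrow is handed out, nothing is cloned.
    fn peek(&self) -> Option<&Tok> {
        self.toks[self.index..]
            .iter()
            .find(|t| !matches!(t, Tok::Whitespace(_)))
    }

    /// Consume the next non-whitespace token: the clone happens only for
    /// the one token that is actually returned to the caller.
    fn next(&mut self) -> Option<Tok> {
        while let Some(t) = self.toks.get(self.index) {
            self.index += 1;
            if !matches!(t, Tok::Whitespace(_)) {
                return Some(t.clone());
            }
        }
        None
    }
}

fn main() {
    let mut s = Stream {
        toks: vec![
            Tok::Word("SELECT".into()),
            Tok::Whitespace(" ".into()),
            Tok::Word("version".into()),
        ],
        index: 0,
    };
    assert_eq!(s.peek(), Some(&Tok::Word("SELECT".into())));
    assert_eq!(s.next(), Some(Tok::Word("SELECT".into())));
    assert_eq!(s.next(), Some(Tok::Word("version".into())));
}
```

The refactor below makes the same trade: `peek_nth_token` and `next_token_no_skip` work with borrowed tokens internally, and `next_token` clones only the single non-whitespace token it actually returns.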
@@ -567,13 +567,13 @@ impl Parser {
     pub fn peek_nth_token(&self, mut n: usize) -> Option<Token> {
         let mut index = self.index;
         loop {
-            match self.token_at(index) {
+            match self.tokens.get(index) {
                 Some(Token::Whitespace(_)) => {
                     index += 1;
                 }
                 Some(token) => {
                     if n == 0 {
-                        return Some(token);
+                        return Some(token.clone());
                     }
                     index += 1;
                     n -= 1;
@@ -589,56 +589,32 @@ impl Parser {
     pub fn next_token(&mut self) -> Option<Token> {
         loop {
             match self.next_token_no_skip() {
-                Some(Token::Whitespace(_)) => {
-                    continue;
-                }
-                token => {
-                    return token;
-                }
+                Some(Token::Whitespace(_)) => continue,
+                token => return token.cloned(),
             }
         }
     }
 
-    /// see the token at this index
-    fn token_at(&self, n: usize) -> Option<Token> {
-        if let Some(token) = self.tokens.get(n) {
-            Some(token.clone())
-        } else {
-            None
-        }
-    }
-
-    pub fn next_token_no_skip(&mut self) -> Option<Token> {
+    pub fn next_token_no_skip(&mut self) -> Option<&Token> {
         if self.index < self.tokens.len() {
             self.index += 1;
-            Some(self.tokens[self.index - 1].clone())
+            Some(&self.tokens[self.index - 1])
         } else {
             None
         }
     }
 
     /// Push back the last one non-whitespace token
-    pub fn prev_token(&mut self) -> Option<Token> {
-        // TODO: returned value is unused (available via peek_token)
+    pub fn prev_token(&mut self) {
         loop {
-            match self.prev_token_no_skip() {
-                Some(Token::Whitespace(_)) => {
+            assert!(self.index > 0);
+            if self.index > 0 {
+                self.index -= 1;
+                if let Token::Whitespace(_) = &self.tokens[self.index] {
                     continue;
                 }
-                token => {
-                    return token;
-                }
-            }
-        }
-    }
-
-    /// Get the previous token and decrement the token index
-    fn prev_token_no_skip(&mut self) -> Option<Token> {
-        if self.index > 0 {
-            self.index -= 1;
-            Some(self.tokens[self.index].clone())
-        } else {
-            None
+            };
+            return;
         }
     }
 
@@ -1776,13 +1752,12 @@ mod tests {
     fn test_prev_index() {
         let sql = "SELECT version()";
         all_dialects().run_parser_method(sql, |parser| {
-            assert_eq!(parser.prev_token(), None);
             assert_eq!(parser.next_token(), Some(Token::make_keyword("SELECT")));
             assert_eq!(parser.next_token(), Some(Token::make_word("version", None)));
-            assert_eq!(parser.prev_token(), Some(Token::make_word("version", None)));
+            parser.prev_token();
             assert_eq!(parser.peek_token(), Some(Token::make_word("version", None)));
-            assert_eq!(parser.prev_token(), Some(Token::make_keyword("SELECT")));
-            assert_eq!(parser.prev_token(), None);
+            parser.prev_token();
+            assert_eq!(parser.peek_token(), Some(Token::make_keyword("SELECT")));
         });
     }
 }
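The updated test shows the caller-side effect of the `prev_token` change: instead of using its return value, callers back up with `prev_token()` and then inspect the current token with `peek_token()`. The `token => return token.cloned()` arm in `next_token` relies on the standard library's `Option::cloned`, which turns an `Option<&T>` into an `Option<T>` by cloning the referenced value and leaves `None` as `None`. A tiny standalone illustration, with a made-up `Tok` type:

```rust
#[derive(Clone, Debug, PartialEq)]
struct Tok(String); // stand-in token type, invented for this example

fn main() {
    let toks = vec![Tok("SELECT".to_string())];

    let borrowed: Option<&Tok> = toks.get(0);   // borrow, nothing cloned yet
    let owned: Option<Tok> = borrowed.cloned(); // the clone happens only here
    assert_eq!(owned, Some(Tok("SELECT".to_string())));

    // Out-of-range lookups simply stay None; no token is cloned.
    assert_eq!(toks.get(1).cloned(), None);
}
```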