PostgreSQL Tokenization: Fix unexpected characters after question mark being silently ignored (#2129)
Some checks are pending
license / Release Audit Tool (RAT) (push) Waiting to run
Rust / codestyle (push) Waiting to run
Rust / lint (push) Waiting to run
Rust / benchmark-lint (push) Waiting to run
Rust / compile (push) Waiting to run
Rust / docs (push) Waiting to run
Rust / compile-no-std (push) Waiting to run
Rust / test (beta) (push) Waiting to run
Rust / test (nightly) (push) Waiting to run
Rust / test (stable) (push) Waiting to run

This commit is contained in:
jnlt3 2025-12-18 15:06:55 +03:00 committed by GitHub
parent d78dbc97a1
commit 39418cfebb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -1717,7 +1717,7 @@ impl<'a> Tokenizer<'a> {
}
}
Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp),
-            _ => self.consume_and_return(chars, Token::Question),
+            _ => Ok(Some(Token::Question)),
}
}
'?' => {
@@ -4147,4 +4147,23 @@ mod tests {
panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
}
}
/// Regression test: a bare `?` that is not part of a compound operator
/// (`?|`, `?&`, `?#`) must produce `Token::Question` without swallowing
/// the character that follows it — here, `y` must survive as its own token.
#[test]
fn tokenize_question_mark() {
    let pg = PostgreSqlDialect {};
    let input = "SELECT x ? y";
    let actual = Tokenizer::new(&pg, input).tokenize().unwrap();
    let expected = vec![
        Token::make_keyword("SELECT"),
        Token::Whitespace(Whitespace::Space),
        Token::make_word("x", None),
        Token::Whitespace(Whitespace::Space),
        Token::Question,
        Token::Whitespace(Whitespace::Space),
        Token::make_word("y", None),
    ];
    compare(actual, expected);
}
}