Eliminate redundant string copy of Tokenizer (#343)

Signed-off-by: koushiro <koushiro.cqx@gmail.com>
Qinxuan Chen 2021-09-02 15:20:31 +08:00 committed by GitHub
parent 77d90d3b85
commit d8adb1708c

@@ -290,17 +290,17 @@ pub struct TokenizerError {
 /// SQL Tokenizer
 pub struct Tokenizer<'a> {
     dialect: &'a dyn Dialect,
-    pub query: String,
-    pub line: u64,
-    pub col: u64,
+    query: &'a str,
+    line: u64,
+    col: u64,
 }
 
 impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
-    pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
+    pub fn new(dialect: &'a dyn Dialect, query: &'a str) -> Self {
         Self {
             dialect,
-            query: query.to_string(),
+            query,
             line: 1,
             col: 1,
         }