From c6c391c114987265d0f9cbeb8a3d9de12ea763b9 Mon Sep 17 00:00:00 2001
From: Luca
Date: Wed, 29 Oct 2025 10:38:42 +0100
Subject: [PATCH] Fixed broken doc test

---
 src/tokenizer.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 9b1094f8..3fa46a48 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -851,7 +851,7 @@ impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
     ///
     /// ```
-    /// # use sqlparser::tokenizer::{Token, Whitespace, Tokenizer};
+    /// # use sqlparser::tokenizer::{Token, Tokenizer};
     /// # use sqlparser::dialect::GenericDialect;
     /// # let dialect = GenericDialect{};
     /// let query = r#"SELECT 'foo'"#;
@@ -861,7 +861,6 @@ impl<'a> Tokenizer<'a> {
     ///
     /// assert_eq!(tokens, vec![
     ///     Token::make_word("SELECT", None),
-    ///     Token::Whitespace(Whitespace::Space),
     ///     Token::SingleQuotedString("foo".to_string()),
     /// ]);
     pub fn new(dialect: &'a dyn Dialect, query: &'a str) -> Self {
@@ -1673,7 +1672,7 @@ impl<'a> Tokenizer<'a> {
                     // regular identifier starting with an "E" or "e"
                     let s = self.tokenize_word("~", chars, prev_keyword)?;
                     Ok(Some(Token::make_word(s, None)))
-                } 
+                }
                 _ => self.start_binop(chars, "~", Token::Tilde),
             }
         }