From ab88e02f0d7ffe206dec149fff6cdea8c069250e Mon Sep 17 00:00:00 2001
From: Nickolay Ponomarev
Date: Sat, 8 Jun 2019 22:10:57 +0300
Subject: [PATCH] Raise a TokenizerError when a delimited identifier is not
 closed before EOF

---
 src/sqltokenizer.rs | 24 ++++++++++++++++++++++--
 1 file changed, 22 insertions(+), 2 deletions(-)

diff --git a/src/sqltokenizer.rs b/src/sqltokenizer.rs
index 5725f2d5..ea7ca49c 100644
--- a/src/sqltokenizer.rs
+++ b/src/sqltokenizer.rs
@@ -322,8 +322,14 @@ impl<'a> Tokenizer<'a> {
                     chars.next(); // consume the opening quote
                     let quote_end = SQLWord::matching_end_quote(quote_start);
                     let s = peeking_take_while(chars, |ch| ch != quote_end);
-                    chars.next(); // TODO: raise error on EOF
-                    Ok(Some(Token::make_word(&s, Some(quote_start))))
+                    if chars.next() == Some(quote_end) {
+                        Ok(Some(Token::make_word(&s, Some(quote_start))))
+                    } else {
+                        Err(TokenizerError(format!(
+                            "Expected close delimiter '{}' before EOF.",
+                            quote_end
+                        )))
+                    }
                 }
                 // numbers
                 '0'..='9' => {
@@ -743,6 +749,20 @@ mod tests {
         compare(expected, tokens);
     }
 
+    #[test]
+    fn tokenize_mismatched_quotes() {
+        let sql = String::from("\"foo");
+
+        let dialect = GenericSqlDialect {};
+        let mut tokenizer = Tokenizer::new(&dialect, &sql);
+        assert_eq!(
+            tokenizer.tokenize(),
+            Err(TokenizerError(
+                "Expected close delimiter '\"' before EOF.".to_string(),
+            ))
+        );
+    }
+
     #[test]
     fn tokenize_newlines() {
         let sql = String::from("line1\nline2\rline3\r\nline4\r");
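
A minimal sketch of how calling code might observe the new behavior, not part of the patch itself. The module paths (sqlparser::dialect::GenericSqlDialect, sqlparser::sqltokenizer::Tokenizer) and the example query are assumptions based on the crate layout suggested by the patch; only the types and calls exercised by the new test above are taken from the source.

    // Illustrative only: tokenize a query whose delimited identifier is never closed.
    use sqlparser::dialect::GenericSqlDialect;
    use sqlparser::sqltokenizer::Tokenizer;

    fn main() {
        // The closing '"' after "col is missing, so the tokenizer hits EOF
        // inside the delimited identifier.
        let sql = String::from("SELECT \"col FROM t");

        let dialect = GenericSqlDialect {};
        let mut tokenizer = Tokenizer::new(&dialect, &sql);

        match tokenizer.tokenize() {
            Ok(tokens) => println!("{} tokens", tokens.len()),
            // With this patch applied, the unterminated identifier is reported
            // as a TokenizerError instead of being silently accepted.
            Err(e) => eprintln!("tokenizer error: {:?}", e),
        }
    }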