From 0634ec4a83d9bce7ba1e879d9a543db5171b5d90 Mon Sep 17 00:00:00 2001
From: Nickolay Ponomarev <asqueella@gmail.com>
Date: Sun, 21 Apr 2019 00:07:43 +0300
Subject: [PATCH] Apply suggestions from `cargo fix --edition-idioms`

---
 src/sqlparser.rs           |  2 +-
 src/sqltokenizer.rs        | 18 +++++++++---------
 tests/sqlparser_generic.rs |  2 +-
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/src/sqlparser.rs b/src/sqlparser.rs
index 27824110..529e6724 100644
--- a/src/sqlparser.rs
+++ b/src/sqlparser.rs
@@ -56,7 +56,7 @@ impl Parser {
     }
 
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
-    pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<Vec<ASTNode>, ParserError> {
+    pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<Vec<ASTNode>, ParserError> {
         let mut tokenizer = Tokenizer::new(dialect, &sql);
         let tokens = tokenizer.tokenize()?;
         let mut parser = Parser::new(tokens);
diff --git a/src/sqltokenizer.rs b/src/sqltokenizer.rs
index 83105736..cab8f22e 100644
--- a/src/sqltokenizer.rs
+++ b/src/sqltokenizer.rs
@@ -212,7 +212,7 @@ pub struct TokenizerError(String);
 
 /// SQL Tokenizer
 pub struct Tokenizer<'a> {
-    dialect: &'a Dialect,
+    dialect: &'a dyn Dialect,
     pub query: String,
     pub line: u64,
     pub col: u64,
@@ -220,7 +220,7 @@ impl<'a> Tokenizer<'a> {
 
 impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
-    pub fn new(dialect: &'a Dialect, query: &str) -> Self {
+    pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
         Self {
             dialect,
             query: query.to_string(),
@@ -256,7 +256,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Get the next token or return None
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<Token>, TokenizerError> {
+    fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
         //println!("next_token: {:?}", chars.peek());
         match chars.peek() {
             Some(&ch) => match ch {
@@ -312,11 +312,11 @@ impl<'a> Tokenizer<'a> {
                     Ok(Some(Token::make_word(&s, Some(quote_start))))
                 }
                 // numbers
-                '0'...'9' => {
+                '0'..='9' => {
                     let mut s = String::new();
                     while let Some(&ch) = chars.peek() {
                         match ch {
-                            '0'...'9' | '.' => {
+                            '0'..='9' | '.' => {
                                 chars.next(); // consume
                                 s.push(ch);
                             }
@@ -436,7 +436,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Tokenize an identifier or keyword, after the first char is already consumed.
-    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars<'_>>) -> String {
         let mut s = String::new();
         s.push(first_char);
         while let Some(&ch) = chars.peek() {
@@ -451,7 +451,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Read a single quoted string, starting with the opening quote.
-    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars<'_>>) -> String {
         //TODO: handle escaped quotes in string
         //TODO: handle newlines in string
         //TODO: handle EOF before terminating quote
@@ -475,7 +475,7 @@ impl<'a> Tokenizer<'a> {
 
     fn tokenize_multiline_comment(
         &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
     ) -> Result<Option<Token>, TokenizerError> {
         let mut s = String::new();
         let mut maybe_closing_comment = false;
@@ -506,7 +506,7 @@ impl<'a> Tokenizer<'a> {
 
     fn consume_and_return(
        &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
         t: Token,
     ) -> Result<Option<Token>, TokenizerError> {
         chars.next();
diff --git a/tests/sqlparser_generic.rs b/tests/sqlparser_generic.rs
index 390d47c0..5799cd4d 100644
--- a/tests/sqlparser_generic.rs
+++ b/tests/sqlparser_generic.rs
@@ -1070,7 +1070,7 @@ fn parse_sql_expr(sql: &str) -> ASTNode {
     generic_ast
 }
 
-fn parse_sql_expr_with(dialect: &Dialect, sql: &str) -> ASTNode {
+fn parse_sql_expr_with(dialect: &dyn Dialect, sql: &str) -> ASTNode {
     let mut tokenizer = Tokenizer::new(dialect, &sql);
     let tokens = tokenizer.tokenize().unwrap();
     let mut parser = Parser::new(tokens);