Apply suggestions from cargo fix --edition-idioms

commit 0634ec4a83
parent b12a19e197
Author: Nickolay Ponomarev
Date: 2019-04-21 00:07:43 +03:00

3 changed files with 11 additions and 11 deletions
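For context: every hunk below is a mechanical rewrite suggested by the Rust 2018 idiom lints. A minimal sketch of the invocation, assuming the crate has already been migrated to the 2018 edition:

    cargo fix --edition-idioms

Three lints account for all eleven changed lines: bare trait objects (`&Dialect` becomes `&dyn Dialect`), elided lifetimes in paths (`Chars` becomes `Chars<'_>`), and the deprecated `...` inclusive-range pattern syntax (`'0'...'9'` becomes `'0'..='9'`).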


@@ -56,7 +56,7 @@ impl Parser {
     }
 
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
-    pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
+    pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
         let mut tokenizer = Tokenizer::new(dialect, &sql);
         let tokens = tokenizer.tokenize()?;
         let mut parser = Parser::new(tokens);
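The hunk above is the bare-trait-objects idiom: in Rust 2018, a trait used as a type must be spelled `dyn Trait`. A minimal sketch of the lint, using a hypothetical trait rather than this crate's real Dialect:

    trait Speaks {
        fn speak(&self) -> String;
    }

    // Rust 2015 accepted `&Speaks` here; the 2018 `bare_trait_objects`
    // lint rewrites it to `&dyn Speaks`, making the dynamic dispatch
    // through a trait object explicit.
    fn greet(s: &dyn Speaks) -> String {
        format!("it says: {}", s.speak())
    }

    fn main() {
        struct Dog;
        impl Speaks for Dog {
            fn speak(&self) -> String {
                "woof".to_string()
            }
        }
        println!("{}", greet(&Dog));
    }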


@@ -212,7 +212,7 @@ pub struct TokenizerError(String);
 
 /// SQL Tokenizer
 pub struct Tokenizer<'a> {
-    dialect: &'a Dialect,
+    dialect: &'a dyn Dialect,
     pub query: String,
     pub line: u64,
     pub col: u64,
@@ -220,7 +220,7 @@ pub struct Tokenizer<'a> {
 
 impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
-    pub fn new(dialect: &'a Dialect, query: &str) -> Self {
+    pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
         Self {
             dialect,
             query: query.to_string(),
@@ -256,7 +256,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Get the next token or return None
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<Token>, TokenizerError> {
+    fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
         //println!("next_token: {:?}", chars.peek());
         match chars.peek() {
             Some(&ch) => match ch {
@@ -312,11 +312,11 @@ impl<'a> Tokenizer<'a> {
                     Ok(Some(Token::make_word(&s, Some(quote_start))))
                 }
                 // numbers
-                '0'...'9' => {
+                '0'..='9' => {
                     let mut s = String::new();
                     while let Some(&ch) = chars.peek() {
                         match ch {
-                            '0'...'9' | '.' => {
+                            '0'..='9' | '.' => {
                                 chars.next(); // consume
                                 s.push(ch);
                             }
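The `'0'...'9'` to `'0'..='9'` rewrite comes from the `ellipsis_inclusive_range_patterns` lint: the `...` spelling of inclusive range patterns is deprecated in favor of `..=`. A small standalone sketch of the same pattern the tokenizer uses above (a hypothetical helper, not this crate's API):

    // `'0'..='9'` is an inclusive range pattern: it matches '0' through
    // '9', both endpoints included. The older `'0'...'9'` still compiles
    // on Rust 2015 but triggers a deprecation warning.
    fn is_digit_or_dot(ch: char) -> bool {
        match ch {
            '0'..='9' | '.' => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_digit_or_dot('7'));
        assert!(is_digit_or_dot('.'));
        assert!(!is_digit_or_dot('a'));
    }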
@@ -436,7 +436,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Tokenize an identifier or keyword, after the first char is already consumed.
-    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars<'_>>) -> String {
         let mut s = String::new();
         s.push(first_char);
         while let Some(&ch) = chars.peek() {
@@ -451,7 +451,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Read a single quoted string, starting with the opening quote.
-    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars<'_>>) -> String {
         //TODO: handle escaped quotes in string
         //TODO: handle newlines in string
         //TODO: handle EOF before terminating quote
@@ -475,7 +475,7 @@ impl<'a> Tokenizer<'a> {
 
     fn tokenize_multiline_comment(
         &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
     ) -> Result<Option<Token>, TokenizerError> {
         let mut s = String::new();
         let mut maybe_closing_comment = false;
@@ -506,7 +506,7 @@ impl<'a> Tokenizer<'a> {
 
     fn consume_and_return(
         &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
         t: Token,
     ) -> Result<Option<Token>, TokenizerError> {
         chars.next();
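The remaining tokenizer changes are the `elided_lifetimes_in_paths` idiom: a bare `Chars` in a signature hides that the iterator borrows from a string, and the `'_` placeholder makes the elided lifetime visible. A minimal sketch with the same parameter shape as the methods above (a hypothetical helper, not part of this crate):

    use std::iter::Peekable;
    use std::str::Chars;

    // `Chars<'_>` signals that the iterator borrows from some string with
    // an elided lifetime; plain `Chars` compiles but trips the 2018 lint.
    fn peek_char(chars: &mut Peekable<Chars<'_>>) -> Option<char> {
        chars.peek().copied()
    }

    fn main() {
        let sql = String::from("SELECT 1");
        let mut chars = sql.chars().peekable();
        assert_eq!(peek_char(&mut chars), Some('S'));
    }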


@@ -1070,7 +1070,7 @@ fn parse_sql_expr(sql: &str) -> ASTNode {
     generic_ast
 }
 
-fn parse_sql_expr_with(dialect: &Dialect, sql: &str) -> ASTNode {
+fn parse_sql_expr_with(dialect: &dyn Dialect, sql: &str) -> ASTNode {
     let mut tokenizer = Tokenizer::new(dialect, &sql);
     let tokens = tokenizer.tokenize().unwrap();
     let mut parser = Parser::new(tokens);