clippy::redundant_borrow

Maan2003 2021-06-13 09:24:16 +05:30
parent d6737e55fb
commit c9b4ac5be4
114 changed files with 285 additions and 285 deletions
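The change is mechanical throughout: a value that is already a reference gets borrowed again at a call site that only needs the reference, producing a `&&T` that deref coercion immediately collapses back to `&T`. Clippy reports this as a redundant borrow (`clippy::needless_borrow`). A minimal standalone sketch of the pattern, with illustrative names not taken from this commit:

    fn token_len(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let text: &str = "fn main() {}";
        // `text` is already a `&str`, so `&text` is a `&&str` that the
        // compiler auto-derefs right back to `&str`; clippy flags the `&`.
        let before = token_len(&text); // warns: clippy::needless_borrow
        let after = token_len(text); // the fix applied throughout this commit
        assert_eq!(before, after);
    }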

@@ -30,7 +30,7 @@ impl ast::UseTree {
         let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() {
             make::path_unqualified(make::path_segment_self())
         } else {
-            match split_path_prefix(&prefix) {
+            match split_path_prefix(prefix) {
                 Some(it) => it,
                 None => return self.clone(),
             }

@@ -242,7 +242,7 @@ impl ast::ByteString {
             (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
             (Ok(c), true) => {
                 buf.reserve_exact(text.len());
-                buf.extend_from_slice(&text[..char_range.start].as_bytes());
+                buf.extend_from_slice(text[..char_range.start].as_bytes());
                 buf.push(c as u8);
             }
             (Err(_), _) => has_error = true,
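The `ast::ByteString` hunk above is the subtlest instance: a method call binds tighter than a leading `&`, so the borrow applies to the `&[u8]` returned by `as_bytes()`, not to the indexing expression. A small standalone sketch (not rust-analyzer code) of why dropping the `&` is safe:

    fn main() {
        let text = "abcdef";
        let start = 3;
        let mut buf: Vec<u8> = Vec::new();
        // Parsed as `&(text[..start].as_bytes())`, i.e. `&&[u8]`;
        // `extend_from_slice` only needs `&[u8]`, so the `&` is redundant.
        buf.extend_from_slice(&text[..start].as_bytes()); // warns
        buf.extend_from_slice(text[..start].as_bytes()); // lint-free equivalent
        assert_eq!(buf, b"abcabc");
    }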

@@ -15,7 +15,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
 
     let mut token_source = TextTokenSource::new(text, &tokens);
     let mut tree_sink = TextTreeSink::new(text, &tokens);
@@ -33,7 +33,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
     text: &str,
     fragment_kind: parser::FragmentKind,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }

@@ -144,7 +144,7 @@ fn rustc_token_kind_to_syntax_kind(
         }
         rustc_lexer::TokenKind::RawIdent => IDENT,
-        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(kind),
         rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME_IDENT,
         rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {

@@ -26,11 +26,11 @@ pub(crate) fn incremental_reparse(
     edit: &Indel,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -52,7 +52,7 @@ fn reparse_token(
                 }
             }
 
-            let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+            let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
             let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
             if new_token_kind != prev_token_kind

@@ -69,13 +69,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_absent(&errors, path);
+        assert_errors_are_absent(errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_present(&errors, path);
+        assert_errors_are_present(errors, path);
         parse.debug_dump()
     });
 }
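The remaining hunks across the other hundred-plus files follow the same two shapes shown above. On a reasonably recent toolchain, a plain `cargo clippy` run surfaces this lint (`clippy::needless_borrow` is warn-by-default), and `cargo clippy --fix` can apply this class of suggestion automatically.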