Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-10 02:12:40 +00:00)

commit c9b4ac5be4 (parent d6737e55fb)

    clippy::redundant_borrow

114 changed files with 285 additions and 285 deletions
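Every hunk below makes the same kind of fix: a `&` is dropped in front of a value that is already a reference, where the compiler was silently unwrapping the double borrow via auto-deref. This is the pattern clippy reports as `needless_borrow` (the commit title spells it "redundant borrow"). A minimal sketch of the pattern, with illustrative names:

// Not from this commit: a minimal sketch of the redundant-borrow pattern.
fn len_of(s: &str) -> usize {
    s.len()
}

fn main() {
    let text: &str = "hello";
    // Flagged: `text` is already `&str`, so `&text` is `&&str`; auto-deref
    // makes it compile, but the extra borrow does nothing.
    let _redundant = len_of(&text);
    // After the fix, the reference is passed as-is:
    let n = len_of(text);
    println!("{n}");
}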
@@ -30,7 +30,7 @@ impl ast::UseTree {
         let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() {
             make::path_unqualified(make::path_segment_self())
         } else {
-            match split_path_prefix(&prefix) {
+            match split_path_prefix(prefix) {
                 Some(it) => it,
                 None => return self.clone(),
             }
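In this hunk `prefix` appears to already be a `&ast::Path` (it is compared against `self.path().as_ref()`), so `&prefix` borrowed a reference. A self-contained sketch of the same shape, with hypothetical names (`split`, `prefix`):

// Sketch only: `prefix` arrives as a reference, so borrowing it again
// before the call would create a double borrow.
fn split(p: &str) -> Option<(&str, &str)> {
    p.split_once("::")
}

fn main() {
    let prefix: &str = "foo::bar";
    // Before: `split(&prefix)` passed `&&str`. After the fix:
    match split(prefix) {
        Some((head, tail)) => println!("{head} / {tail}"),
        None => println!("no prefix"),
    }
}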
@@ -242,7 +242,7 @@ impl ast::ByteString {
             (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
             (Ok(c), true) => {
                 buf.reserve_exact(text.len());
-                buf.extend_from_slice(&text[..char_range.start].as_bytes());
+                buf.extend_from_slice(text[..char_range.start].as_bytes());
                 buf.push(c as u8);
             }
             (Err(_), _) => has_error = true,
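`str::as_bytes` already returns `&[u8]`, so the extra `&` in the removed line produced a `&&[u8]` that `extend_from_slice` only accepted via deref coercion. A runnable sketch of the fixed shape:

// Sketch only: `as_bytes()` yields `&[u8]`, so no leading `&` is needed.
fn main() {
    let text = "hello, world";
    let mut buf: Vec<u8> = Vec::new();
    buf.reserve_exact(text.len());
    // Before the fix: buf.extend_from_slice(&text[..5].as_bytes());
    buf.extend_from_slice(text[..5].as_bytes());
    assert_eq!(buf, b"hello");
}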
@@ -15,7 +15,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
 
     let mut token_source = TextTokenSource::new(text, &tokens);
     let mut tree_sink = TextTreeSink::new(text, &tokens);
@@ -33,7 +33,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
     text: &str,
     fragment_kind: parser::FragmentKind,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
        return Err(());
     }
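Both `parse_text` and `parse_text_fragment` take `text: &str`, so `tokenize(&text)` handed the lexer a `&&str`. A sketch of the same two-level call shape, under the assumption that `tokenize` accepts `&str` as the signatures above suggest (the `tokenize` below is a stand-in, not the real lexer):

// Sketch only: a `&str` parameter forwarded to a `&str`-taking helper
// needs no extra borrow.
fn tokenize(text: &str) -> Vec<&str> {
    text.split_whitespace().collect()
}

fn parse_text(text: &str) -> usize {
    // Before: let tokens = tokenize(&text); — `&text` is `&&str`.
    let tokens = tokenize(text);
    tokens.len()
}

fn main() {
    assert_eq!(parse_text("fn main ( )"), 4);
}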
@@ -144,7 +144,7 @@ fn rustc_token_kind_to_syntax_kind(
         }
 
         rustc_lexer::TokenKind::RawIdent => IDENT,
-        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+        rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(kind),
 
         rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME_IDENT,
         rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
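Here the scrutinee is presumably a reference, so match ergonomics already bind `kind` as a reference and `&kind` doubled it. An illustrative sketch with stand-in types (`Tok`, `describe` are hypothetical):

// Sketch only: matching through a reference binds fields by reference.
enum Tok {
    Literal { kind: u8 },
    Other,
}

fn describe(kind: &u8) -> String {
    format!("literal kind {kind}")
}

fn main() {
    let tok = &Tok::Literal { kind: 3 };
    let msg = match tok {
        // `kind` is already `&u8` here; `describe(&kind)` would pass `&&u8`.
        Tok::Literal { kind } => describe(kind),
        Tok::Other => String::from("other"),
    };
    println!("{msg}");
}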
@@ -26,11 +26,11 @@ pub(crate) fn incremental_reparse(
     edit: &Indel,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
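`edit` is declared as `&Indel`, and shared references are `Copy`, so the same reference can be handed to `reparse_token`, `reparse_block`, and `merge_errors` directly without re-borrowing. A sketch with hypothetical `Edit`/`apply` names:

// Sketch only: a `&T` is Copy, so it can be reused across several calls.
struct Edit {
    delete_len: usize,
    insert: String,
}

fn apply(base: usize, edit: &Edit) -> usize {
    base - edit.delete_len + edit.insert.len()
}

fn main() {
    let edit = Edit { delete_len: 2, insert: String::from("abcd") };
    let edit_ref = &edit;
    // Both calls reuse the same reference; `&edit_ref` would be `&&Edit`.
    let a = apply(10, edit_ref);
    let b = apply(a, edit_ref);
    println!("{a} {b}");
}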
@@ -52,7 +52,7 @@ fn reparse_token(
         }
     }
 
-    let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+    let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
     let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
     if new_token_kind != prev_token_kind
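Note the contrast with the unchanged `lex_single_syntax_kind(&new_text)` line just below the fix: that borrow is necessary, because `new_text` is an owned `String`. The lint only fires when the operand is already a reference, as this sketch illustrates:

// Sketch only: borrowing an owned String is needed; borrowing a &str is not.
fn lex(text: &str) -> usize {
    text.len()
}

fn main() {
    let new_text = String::from("token");
    // Needed: borrows the owned String (then deref-coerces to &str).
    let a = lex(&new_text);
    let text_ref: &str = "token";
    // Here a `&` would be redundant; the reference is passed as-is.
    let b = lex(text_ref);
    assert_eq!(a, b);
}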
@@ -69,13 +69,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_absent(&errors, path);
+        assert_errors_are_absent(errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_present(&errors, path);
+        assert_errors_are_present(errors, path);
         parse.debug_dump()
     });
 }
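In the test hunk, `parse.errors()` presumably returns a slice reference already, so `&errors` wrapped it once more before the assertion helpers. A self-contained sketch with a hypothetical `Parse` type:

// Sketch only: a getter returning `&[T]` can be passed straight through.
struct Parse {
    errors: Vec<String>,
}

impl Parse {
    fn errors(&self) -> &[String] {
        &self.errors
    }
}

fn assert_errors_are_absent(errors: &[String], path: &str) {
    assert!(errors.is_empty(), "{path} had errors: {errors:?}");
}

fn main() {
    let parse = Parse { errors: Vec::new() };
    let errors = parse.errors();
    // Before: assert_errors_are_absent(&errors, ...) passed `&&[String]`.
    assert_errors_are_absent(errors, "parser/ok/0001.rs");
}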