diff --git a/src/ast/helpers.rs b/src/ast/helpers.rs
index 1148cbb10e..856d2e4f07 100644
--- a/src/ast/helpers.rs
+++ b/src/ast/helpers.rs
@@ -253,7 +253,7 @@ pub fn to_module_and_member(target: &str) -> (&str, &str) {
 
 /// Convert a location within a file (relative to `base`) to an absolute
 /// position.
-pub fn to_absolute(relative: &Location, base: &Location) -> Location {
+pub fn to_absolute(relative: Location, base: Location) -> Location {
     if relative.row() == 1 {
         Location::new(
             relative.row() + base.row() - 1,
diff --git a/src/autofix/fixer.rs b/src/autofix/fixer.rs
index 6eb7125541..d09e6ab0c6 100644
--- a/src/autofix/fixer.rs
+++ b/src/autofix/fixer.rs
@@ -94,7 +94,7 @@ fn apply_fixes<'a>(
     }
 
     // Add the remaining content.
-    let slice = locator.slice_source_code_at(&last_pos);
+    let slice = locator.slice_source_code_at(last_pos);
     output.append(&slice);
 
     Cow::from(output.finish())
diff --git a/src/cache.rs b/src/cache.rs
index a922b90d10..14ddcabbb0 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -109,7 +109,7 @@ pub fn init() -> Result<()> {
     Ok(())
 }
 
-fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
+fn write_sync(key: u64, value: &[u8]) -> Result<(), std::io::Error> {
     fs::write(
         Path::new(cache_dir())
             .join(content_dir())
@@ -118,7 +118,7 @@ fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
     )
 }
 
-fn read_sync(key: &u64) -> Result<Vec<u8>, std::io::Error> {
+fn read_sync(key: u64) -> Result<Vec<u8>, std::io::Error> {
     fs::read(
         Path::new(cache_dir())
             .join(content_dir())
@@ -138,7 +138,7 @@ pub fn get(
         return None;
     };
 
-    if let Ok(encoded) = read_sync(&cache_key(path, settings, autofix)) {
+    if let Ok(encoded) = read_sync(cache_key(path, settings, autofix)) {
         match bincode::deserialize::<CheckResult>(&encoded[..]) {
             Ok(CheckResult {
                 metadata: CacheMetadata { mtime },
@@ -174,7 +174,7 @@ pub fn set(
         messages,
     };
     if let Err(e) = write_sync(
-        &cache_key(path, settings, autofix),
+        cache_key(path, settings, autofix),
         &bincode::serialize(&check_result).unwrap(),
     ) {
         error!("Failed to write to cache: {e:?}")
diff --git a/src/check_tokens.rs b/src/check_tokens.rs
index b09ea660ed..783d4bc808 100644
--- a/src/check_tokens.rs
+++ b/src/check_tokens.rs
@@ -26,7 +26,7 @@ pub fn check_tokens(
     let enforce_invalid_escape_sequence = settings.enabled.contains(&CheckCode::W605);
 
     let mut state_machine: StateMachine = Default::default();
-    for (start, tok, end) in tokens.iter().flatten() {
+    for &(start, ref tok, end) in tokens.iter().flatten() {
         let is_docstring = if enforce_ambiguous_unicode_character || enforce_quotes {
             state_machine.consume(tok)
         } else {
diff --git a/src/directives.rs b/src/directives.rs
index 4c0a541c01..3e164f2619 100644
--- a/src/directives.rs
+++ b/src/directives.rs
@@ -38,7 +38,7 @@ pub struct Directives {
 pub fn extract_directives(
     lxr: &[LexResult],
     locator: &SourceCodeLocator,
-    flags: &Flags,
+    flags: Flags,
 ) -> Directives {
     Directives {
         noqa_line_for: if flags.contains(Flags::NOQA) {
@@ -75,13 +75,13 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
 /// Extract a set of lines over which to disable isort.
 pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
     let mut exclusions: IntSet<usize> = IntSet::default();
-    let mut off: Option<&Location> = None;
-    for (start, tok, end) in lxr.iter().flatten() {
+    let mut off: Option<Location> = None;
+    for &(start, ref tok, end) in lxr.iter().flatten() {
         // TODO(charlie): Modify RustPython to include the comment text in the token.
         if matches!(tok, Tok::Comment) {
             let comment_text = locator.slice_source_code_range(&Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             });
             if off.is_some() {
                 if comment_text == "# isort: on" {
diff --git a/src/flake8_bugbear/plugins/redundant_tuple_in_exception_handler.rs b/src/flake8_bugbear/plugins/redundant_tuple_in_exception_handler.rs
index 815e852feb..f2c944d0dd 100644
--- a/src/flake8_bugbear/plugins/redundant_tuple_in_exception_handler.rs
+++ b/src/flake8_bugbear/plugins/redundant_tuple_in_exception_handler.rs
@@ -26,7 +26,7 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
-                location = Some(helpers::to_absolute(&start, &range.location));
+                location = Some(helpers::to_absolute(start, range.location));
             }
             count += 1;
         }
@@ -34,7 +34,7 @@
         if matches!(tok, Tok::Rpar) {
             count -= 1;
             if count == 0 {
-                end_location = Some(helpers::to_absolute(&end, &range.location));
+                end_location = Some(helpers::to_absolute(end, range.location));
                 break;
             }
         }
diff --git a/src/flake8_quotes/checks.rs b/src/flake8_quotes/checks.rs
index 8fae48c00d..ea1c19519f 100644
--- a/src/flake8_quotes/checks.rs
+++ b/src/flake8_quotes/checks.rs
@@ -42,14 +42,14 @@ fn good_docstring(quote: &Quote) -> &str {
 
 pub fn quotes(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
     is_docstring: bool,
     settings: &Settings,
 ) -> Option<Check> {
     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });
 
     // Remove any prefixes (e.g., remove `u` from `u"foo"`).
@@ -77,8 +77,8 @@ pub fn quotes(
         return Some(Check::new(
             CheckKind::BadQuotesDocstring(settings.docstring_quotes.clone()),
             Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             },
         ));
     } else if is_multiline {
@@ -95,8 +95,8 @@
         return Some(Check::new(
             CheckKind::BadQuotesMultilineString(settings.multiline_quotes.clone()),
             Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             },
         ));
     } else {
@@ -113,8 +113,8 @@
             return Some(Check::new(
                 CheckKind::AvoidQuoteEscape,
                 Range {
-                    location: *start,
-                    end_location: *end,
+                    location: start,
+                    end_location: end,
                 },
             ));
         }
@@ -126,8 +126,8 @@
         return Some(Check::new(
             CheckKind::BadQuotesInlineString(settings.inline_quotes.clone()),
             Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             },
         ));
     }
diff --git a/src/isort/comments.rs b/src/isort/comments.rs
index ddfda0204e..f4843b43e7 100644
--- a/src/isort/comments.rs
+++ b/src/isort/comments.rs
@@ -22,8 +22,8 @@ pub fn collect_comments<'a>(range: &Range, locator: &'a SourceCodeLocator) -> Ve
         .flatten()
         .filter_map(|(start, tok, end)| {
             if matches!(tok, Tok::Comment) {
-                let start = helpers::to_absolute(&start, &range.location);
-                let end = helpers::to_absolute(&end, &range.location);
+                let start = helpers::to_absolute(start, range.location);
+                let end = helpers::to_absolute(end, range.location);
                 Some(Comment {
                     value: locator.slice_source_code_range(&Range {
                         location: start,
diff --git a/src/isort/format.rs b/src/isort/format.rs
index 4526b6670e..26bb05e811 100644
--- a/src/isort/format.rs
+++ b/src/isort/format.rs
@@ -38,7 +38,7 @@ pub fn format_import_from(
     import_from: &ImportFromData,
     comments: &CommentSet,
     aliases: &[(AliasData, CommentSet)],
-    line_length: &usize,
+    line_length: usize,
     is_first: bool,
 ) -> String {
     // We can only inline if: (1) none of the aliases have atop comments, and (3)
@@ -54,7 +54,7 @@
     {
         let (single_line, import_length) =
             format_single_line(import_from, comments, aliases, is_first);
-        if import_length <= *line_length {
+        if import_length <= line_length {
             return single_line;
         }
     }
diff --git a/src/isort/mod.rs b/src/isort/mod.rs
index fc50929870..7b543560c1 100644
--- a/src/isort/mod.rs
+++ b/src/isort/mod.rs
@@ -400,7 +400,7 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
 pub fn format_imports(
     block: &[&Stmt],
     comments: Vec<Comment>,
-    line_length: &usize,
+    line_length: usize,
     src: &[PathBuf],
     known_first_party: &BTreeSet<String>,
     known_third_party: &BTreeSet<String>,
diff --git a/src/isort/plugins.rs b/src/isort/plugins.rs
index 9b862d9f72..3068421fab 100644
--- a/src/isort/plugins.rs
+++ b/src/isort/plugins.rs
@@ -55,7 +55,7 @@ pub fn check_imports(
     let expected = format_imports(
         &body,
         comments,
-        &(settings.line_length - indentation.len()),
+        settings.line_length - indentation.len(),
         &settings.src,
         &settings.isort.known_first_party,
         &settings.isort.known_third_party,
diff --git a/src/lib.rs b/src/lib.rs
index 76de99f8f3..e85b30b32d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -89,7 +89,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(&settings),
+        directives::Flags::from_settings(&settings),
     );
 
     // Generate checks.
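The changes above all follow one pattern: `Location` (a row/column pair), `u64` cache keys, `usize`, and the bitflags-style `Flags` are all small `Copy` types, so passing them by value costs at most a couple of machine-word copies and lets call sites drop the `&`/`*` noise. A minimal sketch of the pattern, not part of the patch (`Point` here is a stand-in type, not one from this crate):

    // Illustrative sketch only; `Point` is a stand-in, not a type from this crate.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Point {
        row: usize,
        column: usize,
    }

    // Before: callers must take a reference, and bodies may need to deref.
    fn describe_by_ref(point: &Point) -> String {
        format!("{}:{}", point.row, point.column)
    }

    // After: `Point` is `Copy` (two words), so passing it by value is about as
    // cheap as passing the pointer was, and the `&`/`*` noise disappears.
    fn describe_by_value(point: Point) -> String {
        format!("{}:{}", point.row, point.column)
    }

    fn main() {
        let p = Point { row: 1, column: 4 };
        assert_eq!(describe_by_ref(&p), describe_by_value(p));
        // `p` was copied, not moved, so it is still usable here.
        assert_eq!(p.row, 1);
    }

The same reasoning applies to the remaining files below: every `&Location`, `&u64`, `&usize`, and `&Flags` parameter becomes a plain value.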
diff --git a/src/linter.rs b/src/linter.rs
index 758dc73bc3..6f25f846d7 100644
--- a/src/linter.rs
+++ b/src/linter.rs
@@ -151,7 +151,7 @@ pub fn lint_stdin(
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );
 
     // Generate checks.
@@ -215,7 +215,7 @@ pub fn lint_path(
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );
 
     // Generate checks.
@@ -269,7 +269,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );
 
     // Generate checks.
@@ -310,7 +310,7 @@ pub fn test_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Res
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );
     check_path(
         path,
diff --git a/src/pycodestyle/checks.rs b/src/pycodestyle/checks.rs
index 13730f6107..31bc6c2c0f 100644
--- a/src/pycodestyle/checks.rs
+++ b/src/pycodestyle/checks.rs
@@ -103,14 +103,14 @@ fn extract_quote(text: &str) -> &str {
 /// W605
 pub fn invalid_escape_sequence(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
 ) -> Vec<Check> {
     let mut checks = vec![];
 
     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });
 
     // Determine whether the string is single- or triple-quoted.
diff --git a/src/pyupgrade/fixes.rs b/src/pyupgrade/fixes.rs
index b329095911..f6701d5f6c 100644
--- a/src/pyupgrade/fixes.rs
+++ b/src/pyupgrade/fixes.rs
@@ -13,7 +13,7 @@ use crate::source_code_locator::SourceCodeLocator;
 
 /// Generate a fix to remove a base from a ClassDef statement.
 pub fn remove_class_def_base(
     locator: &SourceCodeLocator,
-    stmt_at: &Location,
+    stmt_at: Location,
     expr_at: Location,
     bases: &[Expr],
     keywords: &[Keyword],
@@ -28,7 +28,7 @@
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
-                fix_start = Some(helpers::to_absolute(&start, stmt_at));
+                fix_start = Some(helpers::to_absolute(start, stmt_at));
             }
             count += 1;
         }
@@ -36,7 +36,7 @@
         if matches!(tok, Tok::Rpar) {
             count -= 1;
             if count == 0 {
-                fix_end = Some(helpers::to_absolute(&end, stmt_at));
+                fix_end = Some(helpers::to_absolute(end, stmt_at));
                 break;
             }
         }
@@ -59,7 +59,7 @@
     let mut fix_end: Option<Location> = None;
     let mut seen_comma = false;
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
-        let start = helpers::to_absolute(&start, stmt_at);
+        let start = helpers::to_absolute(start, stmt_at);
         if seen_comma {
             if matches!(tok, Tok::Newline) {
                 fix_end = Some(end);
@@ -86,8 +86,8 @@
     let mut fix_start: Option<Location> = None;
     let mut fix_end: Option<Location> = None;
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
-        let start = helpers::to_absolute(&start, stmt_at);
-        let end = helpers::to_absolute(&end, stmt_at);
+        let start = helpers::to_absolute(start, stmt_at);
+        let end = helpers::to_absolute(end, stmt_at);
         if start == expr_at {
             fix_end = Some(end);
             break;
diff --git a/src/pyupgrade/plugins/useless_object_inheritance.rs b/src/pyupgrade/plugins/useless_object_inheritance.rs
index 3371c78a41..62cb65e40d 100644
--- a/src/pyupgrade/plugins/useless_object_inheritance.rs
+++ b/src/pyupgrade/plugins/useless_object_inheritance.rs
@@ -17,7 +17,7 @@ pub fn useless_object_inheritance(
     if checker.patch(check.kind.code()) {
         if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
             checker.locator,
-            &stmt.location,
+            stmt.location,
             check.location,
             bases,
             keywords,
diff --git a/src/rules/checks.rs b/src/rules/checks.rs
index 7d5ced38e0..9ed5849881 100644
--- a/src/rules/checks.rs
+++ b/src/rules/checks.rs
@@ -1603,8 +1603,8 @@ pub enum Context {
 
 pub fn ambiguous_unicode_character(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
     context: Context,
     settings: &Settings,
     autofix: &fixer::Mode,
@@ -1612,8 +1612,8 @@
     let mut checks = vec![];
 
     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });
 
     let mut col_offset = 0;
diff --git a/src/source_code_locator.rs b/src/source_code_locator.rs
index 0f30cabdcd..f7864d18a0 100644
--- a/src/source_code_locator.rs
+++ b/src/source_code_locator.rs
@@ -25,7 +25,7 @@ impl<'a> SourceCodeLocator<'a> {
         self.rope.get_or_init(|| Rope::from_str(self.contents))
     }
 
-    pub fn slice_source_code_at(&self, location: &Location) -> Cow<'_, str> {
+    pub fn slice_source_code_at(&self, location: Location) -> Cow<'_, str> {
        let rope = self.get_or_init_rope();
        let offset = rope.line_to_char(location.row() - 1) + location.column();
        Cow::from(rope.slice(offset..))
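One non-mechanical change worth calling out is the loop pattern introduced in `check_tokens.rs` and `directives.rs`: `for &(start, ref tok, end) in tokens.iter().flatten()` destructures each `&(Location, Tok, Location)` so that the two `Copy` locations are copied out by value while the token, which is not `Copy`, is still only borrowed. A self-contained sketch of that destructuring (the `Location` and `Tok` definitions here are simplified stand-ins for the RustPython lexer types, not the real ones):

    // Illustrative sketch only; `Location` and `Tok` are simplified stand-ins
    // for the RustPython lexer types used in the patch above.
    #[derive(Clone, Copy)]
    struct Location {
        row: usize,
        column: usize,
    }

    #[allow(dead_code)]
    enum Tok {
        Comment(String),
        Newline,
    }

    fn main() {
        let tokens: Vec<(Location, Tok, Location)> = vec![(
            Location { row: 1, column: 0 },
            Tok::Comment("# isort: off".to_string()),
            Location { row: 1, column: 12 },
        )];

        // `&(start, ref tok, end)` dereferences each tuple: `start` and `end`
        // are copied out as `Location` values (they are `Copy`), while `tok`
        // is bound as `&Tok`, since `Tok` owns a `String` and cannot be copied.
        for &(start, ref tok, end) in tokens.iter() {
            if let Tok::Comment(text) = tok {
                println!("{text} at {}:{}..{}", start.row, start.column, end.column);
            }
        }
    }

This keeps the loop bodies free of dereferences while never cloning a token.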