Implement Ranged on more structs (#6639)

## Summary

I noticed some inconsistencies around uses of `.range.start()`, structs that have a `TextRange` field but don't implement `Ranged`, and so on. This PR implements `Ranged` on those structs and updates call sites to go through the trait (`.range()`, `.start()`, `.end()`) instead of reaching into the `range` field directly.
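
To make the pattern concrete, here is a minimal sketch of what these changes converge on. The real `Ranged` trait lives in `ruff_python_ast` and its exact default methods are assumed here; `ImportBinding` is just a stand-in for the various structs touched by this PR:

```rust
use ruff_text_size::{TextRange, TextSize};

// Sketch of the trait: implementors only provide `range()`, and get
// `start()`/`end()` for free (assumed default methods).
pub trait Ranged {
    fn range(&self) -> TextRange;

    fn start(&self) -> TextSize {
        self.range().start()
    }

    fn end(&self) -> TextSize {
        self.range().end()
    }
}

// A struct that carries a `TextRange` field, like `Binding` or the
// `ImportBinding` helpers changed below.
struct ImportBinding {
    range: TextRange,
}

impl Ranged for ImportBinding {
    fn range(&self) -> TextRange {
        self.range
    }
}

fn example(binding: &ImportBinding) {
    // Before: binding.range.start() / binding.range
    // After:  binding.start()       / binding.range()
    let _offset: TextSize = binding.start();
    let _range: TextRange = binding.range();
}
```

Call sites can then use `binding.start()` and `binding.range()` uniformly, which is what most of the hunks below do.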

## Test Plan

`cargo test`
Charlie Marsh 2023-08-17 11:22:39 -04:00 committed by GitHub
parent a70807e1e1
commit db1c556508
66 changed files with 221 additions and 176 deletions

View file

@ -1,4 +1,5 @@
use ruff_diagnostics::{Diagnostic, Fix}; use ruff_diagnostics::{Diagnostic, Fix};
use ruff_python_ast::Ranged;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::codes::Rule; use crate::codes::Rule;
@ -29,7 +30,7 @@ pub(crate) fn bindings(checker: &mut Checker) {
pyflakes::rules::UnusedVariable { pyflakes::rules::UnusedVariable {
name: binding.name(checker.locator).to_string(), name: binding.name(checker.locator).to_string(),
}, },
binding.range, binding.range(),
); );
if checker.patch(Rule::UnusedVariable) { if checker.patch(Rule::UnusedVariable) {
diagnostic.try_set_fix(|| { diagnostic.try_set_fix(|| {

View file

@ -1,4 +1,5 @@
use ruff_diagnostics::Diagnostic; use ruff_diagnostics::Diagnostic;
use ruff_python_ast::Ranged;
use ruff_python_semantic::analyze::{branch_detection, visibility}; use ruff_python_semantic::analyze::{branch_detection, visibility};
use ruff_python_semantic::{Binding, BindingKind, ScopeKind}; use ruff_python_semantic::{Binding, BindingKind, ScopeKind};
@ -81,7 +82,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
pylint::rules::GlobalVariableNotAssigned { pylint::rules::GlobalVariableNotAssigned {
name: (*name).to_string(), name: (*name).to_string(),
}, },
binding.range, binding.range(),
)); ));
} }
} }
@ -122,14 +123,14 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
} }
#[allow(deprecated)] #[allow(deprecated)]
let line = checker.locator.compute_line_index(shadowed.range.start()); let line = checker.locator.compute_line_index(shadowed.start());
checker.diagnostics.push(Diagnostic::new( checker.diagnostics.push(Diagnostic::new(
pyflakes::rules::ImportShadowedByLoopVar { pyflakes::rules::ImportShadowedByLoopVar {
name: name.to_string(), name: name.to_string(),
line, line,
}, },
binding.range, binding.range(),
)); ));
} }
} }
@ -218,13 +219,13 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
} }
#[allow(deprecated)] #[allow(deprecated)]
let line = checker.locator.compute_line_index(shadowed.range.start()); let line = checker.locator.compute_line_index(shadowed.start());
let mut diagnostic = Diagnostic::new( let mut diagnostic = Diagnostic::new(
pyflakes::rules::RedefinedWhileUnused { pyflakes::rules::RedefinedWhileUnused {
name: (*name).to_string(), name: (*name).to_string(),
line, line,
}, },
binding.range, binding.range(),
); );
if let Some(range) = binding.parent_range(&checker.semantic) { if let Some(range) = binding.parent_range(&checker.semantic) {
diagnostic.set_parent(range.start()); diagnostic.set_parent(range.start());

View file

@ -1855,7 +1855,7 @@ impl<'a> Checker<'a> {
.map(|binding_id| &self.semantic.bindings[binding_id]) .map(|binding_id| &self.semantic.bindings[binding_id])
.filter_map(|binding| match &binding.kind { .filter_map(|binding| match &binding.kind {
BindingKind::Export(Export { names }) => { BindingKind::Export(Export { names }) => {
Some(names.iter().map(|name| (*name, binding.range))) Some(names.iter().map(|name| (*name, binding.range())))
} }
_ => None, _ => None,
}) })

View file

@ -1,10 +1,10 @@
use ruff_python_parser::lexer::LexResult;
use ruff_text_size::TextRange;
use ruff_diagnostics::{Diagnostic, DiagnosticKind}; use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_python_ast::Ranged;
use ruff_python_codegen::Stylist; use ruff_python_codegen::Stylist;
use ruff_python_parser::lexer::LexResult;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_source_file::Locator; use ruff_source_file::Locator;
use ruff_text_size::TextRange;
use crate::registry::{AsRule, Rule}; use crate::registry::{AsRule, Rule};
use crate::rules::pycodestyle::rules::logical_lines::{ use crate::rules::pycodestyle::rules::logical_lines::{

View file

@ -2,9 +2,8 @@ use std::fmt::{Debug, Formatter};
use std::ops::Deref; use std::ops::Deref;
use ruff_python_ast::{Expr, Ranged}; use ruff_python_ast::{Expr, Ranged};
use ruff_text_size::{TextRange, TextSize};
use ruff_python_semantic::Definition; use ruff_python_semantic::Definition;
use ruff_text_size::TextRange;
pub(crate) mod extraction; pub(crate) mod extraction;
pub(crate) mod google; pub(crate) mod google;
@ -28,43 +27,34 @@ impl<'a> Docstring<'a> {
DocstringBody { docstring: self } DocstringBody { docstring: self }
} }
pub(crate) fn start(&self) -> TextSize {
self.expr.start()
}
pub(crate) fn end(&self) -> TextSize {
self.expr.end()
}
pub(crate) fn range(&self) -> TextRange {
self.expr.range()
}
pub(crate) fn leading_quote(&self) -> &'a str { pub(crate) fn leading_quote(&self) -> &'a str {
&self.contents[TextRange::up_to(self.body_range.start())] &self.contents[TextRange::up_to(self.body_range.start())]
} }
} }
impl Ranged for Docstring<'_> {
fn range(&self) -> TextRange {
self.expr.range()
}
}
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub(crate) struct DocstringBody<'a> { pub(crate) struct DocstringBody<'a> {
docstring: &'a Docstring<'a>, docstring: &'a Docstring<'a>,
} }
impl<'a> DocstringBody<'a> { impl<'a> DocstringBody<'a> {
#[inline]
pub(crate) fn start(self) -> TextSize {
self.range().start()
}
pub(crate) fn range(self) -> TextRange {
self.docstring.body_range + self.docstring.start()
}
pub(crate) fn as_str(self) -> &'a str { pub(crate) fn as_str(self) -> &'a str {
&self.docstring.contents[self.docstring.body_range] &self.docstring.contents[self.docstring.body_range]
} }
} }
impl Ranged for DocstringBody<'_> {
fn range(&self) -> TextRange {
self.docstring.body_range + self.docstring.start()
}
}
impl Deref for DocstringBody<'_> { impl Deref for DocstringBody<'_> {
type Target = str; type Target = str;

View file

@ -2,6 +2,7 @@ use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator; use std::iter::FusedIterator;
use ruff_python_ast::docstrings::{leading_space, leading_words}; use ruff_python_ast::docstrings::{leading_space, leading_words};
use ruff_python_ast::Ranged;
use ruff_text_size::{TextLen, TextRange, TextSize}; use ruff_text_size::{TextLen, TextRange, TextSize};
use strum_macros::EnumIter; use strum_macros::EnumIter;
@ -366,6 +367,12 @@ impl<'a> SectionContext<'a> {
} }
} }
impl Ranged for SectionContext<'_> {
fn range(&self) -> TextRange {
self.range()
}
}
impl Debug for SectionContext<'_> { impl Debug for SectionContext<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SectionContext") f.debug_struct("SectionContext")

View file

@ -194,7 +194,7 @@ impl<'a> Importer<'a> {
// import and the current location, and thus the symbol would not be available). It's also // import and the current location, and thus the symbol would not be available). It's also
// unclear whether should add an import statement at the start of the file, since it could // unclear whether should add an import statement at the start of the file, since it could
// be shadowed between the import and the current location. // be shadowed between the import and the current location.
if imported_name.range().start() > at { if imported_name.start() > at {
return Some(Err(ResolutionError::ImportAfterUsage)); return Some(Err(ResolutionError::ImportAfterUsage));
} }

View file

@ -4,6 +4,7 @@ use anyhow::{anyhow, Result};
use itertools::Itertools; use itertools::Itertools;
use ruff_diagnostics::Edit; use ruff_diagnostics::Edit;
use ruff_python_ast::Ranged;
use ruff_python_semantic::{Binding, BindingKind, Scope, ScopeId, SemanticModel}; use ruff_python_semantic::{Binding, BindingKind, Scope, ScopeId, SemanticModel};
pub(crate) struct Renamer; pub(crate) struct Renamer;
@ -220,12 +221,12 @@ impl Renamer {
BindingKind::Import(_) | BindingKind::FromImport(_) => { BindingKind::Import(_) | BindingKind::FromImport(_) => {
if binding.is_alias() { if binding.is_alias() {
// Ex) Rename `import pandas as alias` to `import pandas as pd`. // Ex) Rename `import pandas as alias` to `import pandas as pd`.
Some(Edit::range_replacement(target.to_string(), binding.range)) Some(Edit::range_replacement(target.to_string(), binding.range()))
} else { } else {
// Ex) Rename `import pandas` to `import pandas as pd`. // Ex) Rename `import pandas` to `import pandas as pd`.
Some(Edit::range_replacement( Some(Edit::range_replacement(
format!("{name} as {target}"), format!("{name} as {target}"),
binding.range, binding.range(),
)) ))
} }
} }
@ -234,7 +235,7 @@ impl Renamer {
let module_name = import.call_path.first().unwrap(); let module_name = import.call_path.first().unwrap();
Some(Edit::range_replacement( Some(Edit::range_replacement(
format!("{module_name} as {target}"), format!("{module_name} as {target}"),
binding.range, binding.range(),
)) ))
} }
// Avoid renaming builtins and other "special" bindings. // Avoid renaming builtins and other "special" bindings.
@ -254,7 +255,7 @@ impl Renamer {
| BindingKind::FunctionDefinition(_) | BindingKind::FunctionDefinition(_)
| BindingKind::Deletion | BindingKind::Deletion
| BindingKind::UnboundException(_) => { | BindingKind::UnboundException(_) => {
Some(Edit::range_replacement(target.to_string(), binding.range)) Some(Edit::range_replacement(target.to_string(), binding.range()))
} }
} }
} }

View file

@ -163,7 +163,7 @@ pub(crate) fn unused_loop_control_variable(checker: &mut Checker, target: &Expr,
if scope if scope
.get_all(name) .get_all(name)
.map(|binding_id| checker.semantic().binding(binding_id)) .map(|binding_id| checker.semantic().binding(binding_id))
.filter(|binding| binding.range.start() >= expr.range().start()) .filter(|binding| binding.start() >= expr.start())
.all(|binding| !binding.is_used()) .all(|binding| !binding.is_used())
{ {
diagnostic.set_fix(Fix::suggested(Edit::range_replacement( diagnostic.set_fix(Fix::suggested(Edit::range_replacement(

View file

@ -2,6 +2,7 @@ use rustc_hash::FxHashMap;
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::{Binding, Imported}; use ruff_python_semantic::{Binding, Imported};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
@ -76,7 +77,7 @@ pub(crate) fn unconventional_import_alias(
name: qualified_name, name: qualified_name,
asname: expected_alias.to_string(), asname: expected_alias.to_string(),
}, },
binding.range, binding.range(),
); );
if checker.patch(diagnostic.kind.rule()) { if checker.patch(diagnostic.kind.rule()) {
if checker.semantic().is_available(expected_alias) { if checker.semantic().is_available(expected_alias) {

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::Imported; use ruff_python_semantic::Imported;
use ruff_python_semantic::{Binding, BindingKind}; use ruff_python_semantic::{Binding, BindingKind};
@ -63,7 +64,7 @@ pub(crate) fn unaliased_collections_abc_set_import(
return None; return None;
} }
let mut diagnostic = Diagnostic::new(UnaliasedCollectionsAbcSetImport, binding.range); let mut diagnostic = Diagnostic::new(UnaliasedCollectionsAbcSetImport, binding.range());
if checker.patch(diagnostic.kind.rule()) { if checker.patch(diagnostic.kind.rule()) {
if checker.semantic().is_available("AbstractSet") { if checker.semantic().is_available("AbstractSet") {
diagnostic.try_set_fix(|| { diagnostic.try_set_fix(|| {

View file

@ -1,6 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_python_ast::{self as ast, Expr, Ranged, Stmt};
use ruff_python_semantic::Scope; use ruff_python_semantic::Scope;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
@ -192,7 +192,7 @@ pub(crate) fn unused_private_type_var(
UnusedPrivateTypeVar { UnusedPrivateTypeVar {
name: id.to_string(), name: id.to_string(),
}, },
binding.range, binding.range(),
)); ));
} }
} }
@ -234,7 +234,7 @@ pub(crate) fn unused_private_protocol(
UnusedPrivateProtocol { UnusedPrivateProtocol {
name: class_def.name.to_string(), name: class_def.name.to_string(),
}, },
binding.range, binding.range(),
)); ));
} }
} }
@ -280,7 +280,7 @@ pub(crate) fn unused_private_type_alias(
UnusedPrivateTypeAlias { UnusedPrivateTypeAlias {
name: id.to_string(), name: id.to_string(),
}, },
binding.range, binding.range(),
)); ));
} }
} }
@ -321,7 +321,7 @@ pub(crate) fn unused_private_typed_dict(
UnusedPrivateTypedDict { UnusedPrivateTypedDict {
name: class_def.name.to_string(), name: class_def.name.to_string(),
}, },
binding.range, binding.range(),
)); ));
} }
} }

View file

@ -558,7 +558,7 @@ fn unnecessary_assign(checker: &mut Checker, stack: &Stack) {
// Replace from the start of the assignment statement to the end of the equals // Replace from the start of the assignment statement to the end of the equals
// sign. // sign.
TextRange::new( TextRange::new(
assign.range().start(), assign.start(),
assign assign
.range() .range()
.start() .start()

View file

@ -693,7 +693,7 @@ pub(crate) fn use_ternary_operator(checker: &mut Checker, stmt: &Stmt) {
fn body_range(branch: &IfElifBranch, locator: &Locator) -> TextRange { fn body_range(branch: &IfElifBranch, locator: &Locator) -> TextRange {
TextRange::new( TextRange::new(
locator.line_end(branch.test.end()), locator.line_end(branch.test.end()),
locator.line_end(branch.range.end()), locator.line_end(branch.end()),
) )
} }
@ -731,7 +731,7 @@ pub(crate) fn if_with_same_arms(checker: &mut Checker, locator: &Locator, stmt_i
checker.diagnostics.push(Diagnostic::new( checker.diagnostics.push(Diagnostic::new(
IfWithSameArms, IfWithSameArms,
TextRange::new(current_branch.range.start(), following_branch.range.end()), TextRange::new(current_branch.start(), following_branch.end()),
)); ));
} }
} }

View file

@ -5,6 +5,7 @@ use rustc_hash::FxHashMap;
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::{AnyImport, Imported, ResolvedReferenceId, Scope, StatementId}; use ruff_python_semantic::{AnyImport, Imported, ResolvedReferenceId, Scope, StatementId};
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
@ -101,11 +102,11 @@ pub(crate) fn runtime_import_in_type_checking_block(
let import = ImportBinding { let import = ImportBinding {
import, import,
reference_id, reference_id,
range: binding.range, range: binding.range(),
parent_range: binding.parent_range(checker.semantic()), parent_range: binding.parent_range(checker.semantic()),
}; };
if checker.rule_is_ignored(Rule::RuntimeImportInTypeCheckingBlock, import.range.start()) if checker.rule_is_ignored(Rule::RuntimeImportInTypeCheckingBlock, import.start())
|| import.parent_range.is_some_and(|parent_range| { || import.parent_range.is_some_and(|parent_range| {
checker.rule_is_ignored( checker.rule_is_ignored(
Rule::RuntimeImportInTypeCheckingBlock, Rule::RuntimeImportInTypeCheckingBlock,
@ -192,6 +193,12 @@ struct ImportBinding<'a> {
parent_range: Option<TextRange>, parent_range: Option<TextRange>,
} }
impl Ranged for ImportBinding<'_> {
fn range(&self) -> TextRange {
self.range
}
}
/// Generate a [`Fix`] to remove runtime imports from a type-checking block. /// Generate a [`Fix`] to remove runtime imports from a type-checking block.
fn fix_imports( fn fix_imports(
checker: &Checker, checker: &Checker,
@ -211,7 +218,7 @@ fn fix_imports(
let at = imports let at = imports
.iter() .iter()
.map(|ImportBinding { reference_id, .. }| { .map(|ImportBinding { reference_id, .. }| {
checker.semantic().reference(*reference_id).range().start() checker.semantic().reference(*reference_id).start()
}) })
.min() .min()
.expect("Expected at least one import"); .expect("Expected at least one import");

View file

@ -5,6 +5,7 @@ use rustc_hash::FxHashMap;
use ruff_diagnostics::{AutofixKind, Diagnostic, DiagnosticKind, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, DiagnosticKind, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::{AnyImport, Binding, Imported, ResolvedReferenceId, Scope, StatementId}; use ruff_python_semantic::{AnyImport, Binding, Imported, ResolvedReferenceId, Scope, StatementId};
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
@ -308,11 +309,11 @@ pub(crate) fn typing_only_runtime_import(
let import = ImportBinding { let import = ImportBinding {
import, import,
reference_id, reference_id,
range: binding.range, range: binding.range(),
parent_range: binding.parent_range(checker.semantic()), parent_range: binding.parent_range(checker.semantic()),
}; };
if checker.rule_is_ignored(rule_for(import_type), import.range.start()) if checker.rule_is_ignored(rule_for(import_type), import.start())
|| import.parent_range.is_some_and(|parent_range| { || import.parent_range.is_some_and(|parent_range| {
checker.rule_is_ignored(rule_for(import_type), parent_range.start()) checker.rule_is_ignored(rule_for(import_type), parent_range.start())
}) })
@ -390,6 +391,12 @@ struct ImportBinding<'a> {
parent_range: Option<TextRange>, parent_range: Option<TextRange>,
} }
impl Ranged for ImportBinding<'_> {
fn range(&self) -> TextRange {
self.range
}
}
/// Return the [`Rule`] for the given import type. /// Return the [`Rule`] for the given import type.
fn rule_for(import_type: ImportType) -> Rule { fn rule_for(import_type: ImportType) -> Rule {
match import_type { match import_type {
@ -456,7 +463,7 @@ fn fix_imports(
let at = imports let at = imports
.iter() .iter()
.map(|ImportBinding { reference_id, .. }| { .map(|ImportBinding { reference_id, .. }| {
checker.semantic().reference(*reference_id).range().start() checker.semantic().reference(*reference_id).start()
}) })
.min() .min()
.expect("Expected at least one import"); .expect("Expected at least one import");

View file

@ -2,7 +2,7 @@ use std::iter;
use regex::Regex; use regex::Regex;
use ruff_python_ast as ast; use ruff_python_ast as ast;
use ruff_python_ast::{Parameter, Parameters}; use ruff_python_ast::{Parameter, Parameters, Ranged};
use ruff_diagnostics::DiagnosticKind; use ruff_diagnostics::DiagnosticKind;
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
@ -303,7 +303,7 @@ fn call<'a>(
{ {
Some(Diagnostic::new( Some(Diagnostic::new(
argumentable.check_for(arg.name.to_string()), argumentable.check_for(arg.name.to_string()),
binding.range, binding.range(),
)) ))
} else { } else {
None None

View file

@ -1,10 +1,9 @@
use std::borrow::Cow; use std::borrow::Cow;
use ruff_python_ast::PySourceType; use ruff_python_ast::{PySourceType, Ranged};
use ruff_python_parser::{lexer, AsMode, Tok}; use ruff_python_parser::{lexer, AsMode, Tok};
use ruff_text_size::{TextRange, TextSize};
use ruff_source_file::Locator; use ruff_source_file::Locator;
use ruff_text_size::TextRange;
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct Comment<'a> { pub(crate) struct Comment<'a> {
@ -12,13 +11,9 @@ pub(crate) struct Comment<'a> {
pub(crate) range: TextRange, pub(crate) range: TextRange,
} }
impl Comment<'_> { impl Ranged for Comment<'_> {
pub(crate) const fn start(&self) -> TextSize { fn range(&self) -> TextRange {
self.range.start() self.range
}
pub(crate) const fn end(&self) -> TextSize {
self.range.end()
} }
} }

View file

@ -173,7 +173,7 @@ fn is_unused(expr: &Expr, semantic: &SemanticModel) -> bool {
scope scope
.get_all(id) .get_all(id)
.map(|binding_id| semantic.binding(binding_id)) .map(|binding_id| semantic.binding(binding_id))
.filter(|binding| binding.range.start() >= expr.range().start()) .filter(|binding| binding.start() >= expr.start())
.all(|binding| !binding.is_used()) .all(|binding| !binding.is_used())
} }
_ => false, _ => false,

View file

@ -129,7 +129,7 @@ pub(crate) fn invalid_escape_sequence(
for diagnostic in &mut invalid_escape_sequence { for diagnostic in &mut invalid_escape_sequence {
diagnostic.set_fix(Fix::automatic(Edit::insertion( diagnostic.set_fix(Fix::automatic(Edit::insertion(
r"\".to_string(), r"\".to_string(),
diagnostic.range().start() + TextSize::from(1), diagnostic.start() + TextSize::from(1),
))); )));
} }
} else { } else {

View file

@ -3,6 +3,7 @@ use ruff_diagnostics::Diagnostic;
use ruff_diagnostics::Edit; use ruff_diagnostics::Edit;
use ruff_diagnostics::Fix; use ruff_diagnostics::Fix;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_text_size::TextRange; use ruff_text_size::TextRange;

View file

@ -1,9 +1,9 @@
use ruff_text_size::TextSize;
use ruff_diagnostics::Edit; use ruff_diagnostics::Edit;
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_text_size::TextSize;
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::Violation; use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{DiagnosticKind, Violation}; use ruff_diagnostics::{DiagnosticKind, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;

View file

@ -13,6 +13,7 @@ use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator; use std::iter::FusedIterator;
use bitflags::bitflags; use bitflags::bitflags;
use ruff_python_ast::Ranged;
use ruff_python_parser::lexer::LexResult; use ruff_python_parser::lexer::LexResult;
use ruff_text_size::{TextLen, TextRange, TextSize}; use ruff_text_size::{TextLen, TextRange, TextSize};
@ -310,22 +311,11 @@ impl LogicalLineToken {
pub(crate) const fn kind(&self) -> TokenKind { pub(crate) const fn kind(&self) -> TokenKind {
self.kind self.kind
} }
}
/// Returns the token's start location impl Ranged for LogicalLineToken {
#[inline]
pub(crate) const fn start(&self) -> TextSize {
self.range.start()
}
/// Returns the token's end location
#[inline]
pub(crate) const fn end(&self) -> TextSize {
self.range.end()
}
/// Returns a tuple with the token's `(start, end)` locations /// Returns a tuple with the token's `(start, end)` locations
#[inline] fn range(&self) -> TextRange {
pub(crate) const fn range(&self) -> TextRange {
self.range self.range
} }
} }

View file

@ -1,8 +1,8 @@
use ruff_text_size::TextRange;
use ruff_diagnostics::Violation; use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_text_size::TextRange;
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;

View file

@ -1,7 +1,7 @@
use ruff_text_size::TextRange;
use ruff_diagnostics::Violation; use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_text_size::TextRange;
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;

View file

@ -1,8 +1,8 @@
use ruff_text_size::{TextRange, TextSize};
use ruff_diagnostics::Violation; use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_text_size::{TextRange, TextSize};
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;
use crate::rules::pycodestyle::rules::logical_lines::{LogicalLine, LogicalLineToken}; use crate::rules::pycodestyle::rules::logical_lines::{LogicalLine, LogicalLineToken};

View file

@ -1,10 +1,10 @@
use ruff_text_size::{TextLen, TextRange, TextSize};
use ruff_diagnostics::Violation; use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_python_trivia::PythonWhitespace; use ruff_python_trivia::PythonWhitespace;
use ruff_source_file::Locator; use ruff_source_file::Locator;
use ruff_text_size::{TextLen, TextRange, TextSize};
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;
use crate::rules::pycodestyle::rules::logical_lines::LogicalLine; use crate::rules::pycodestyle::rules::logical_lines::LogicalLine;

View file

@ -1,8 +1,8 @@
use ruff_text_size::{TextRange, TextSize};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_parser::TokenKind; use ruff_python_parser::TokenKind;
use ruff_text_size::{TextRange, TextSize};
use crate::checkers::logical_lines::LogicalLinesContext; use crate::checkers::logical_lines::LogicalLinesContext;
use crate::rules::pycodestyle::rules::logical_lines::LogicalLine; use crate::rules::pycodestyle::rules::logical_lines::LogicalLine;

View file

@ -2,6 +2,7 @@ use memchr::memchr_iter;
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::docstrings::Docstring; use crate::docstrings::Docstring;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines}; use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -3,6 +3,7 @@ use strum::IntoEnumIterator;
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines}; use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -3,6 +3,7 @@ use strum::IntoEnumIterator;
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix}; use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines}; use ruff_source_file::{UniversalNewlineIterator, UniversalNewlines};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -1,10 +1,10 @@
use ruff_text_size::{TextLen, TextRange};
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation}; use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit, Fix}; use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::docstrings::{clean_space, leading_space}; use ruff_python_ast::docstrings::{clean_space, leading_space};
use ruff_python_ast::Ranged;
use ruff_source_file::NewlineWithTrailingNewline; use ruff_source_file::NewlineWithTrailingNewline;
use ruff_text_size::{TextLen, TextRange};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::docstrings::Docstring; use crate::docstrings::Docstring;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_source_file::UniversalNewlines; use ruff_source_file::UniversalNewlines;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -1,8 +1,8 @@
use ruff_text_size::{TextLen, TextRange};
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_source_file::NewlineWithTrailingNewline; use ruff_source_file::NewlineWithTrailingNewline;
use ruff_text_size::{TextLen, TextRange};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::docstrings::Docstring; use crate::docstrings::Docstring;

View file

@ -6,6 +6,7 @@ use once_cell::sync::Lazy;
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::call_path::{from_qualified_name, CallPath}; use ruff_python_ast::call_path::{from_qualified_name, CallPath};
use ruff_python_ast::Ranged;
use ruff_python_semantic::analyze::visibility::{is_property, is_test}; use ruff_python_semantic::analyze::visibility::{is_property, is_test};
use ruff_source_file::UniversalNewlines; use ruff_source_file::UniversalNewlines;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::docstrings::Docstring; use crate::docstrings::Docstring;

View file

@ -1,6 +1,7 @@
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::str::{leading_quote, trailing_quote}; use ruff_python_ast::str::{leading_quote, trailing_quote};
use ruff_python_ast::Ranged;
use ruff_source_file::NewlineWithTrailingNewline; use ruff_source_file::NewlineWithTrailingNewline;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -8,7 +8,7 @@ use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::docstrings::{clean_space, leading_space}; use ruff_python_ast::docstrings::{clean_space, leading_space};
use ruff_python_ast::identifier::Identifier; use ruff_python_ast::identifier::Identifier;
use ruff_python_ast::ParameterWithDefault; use ruff_python_ast::{ParameterWithDefault, Ranged};
use ruff_python_semantic::analyze::visibility::is_staticmethod; use ruff_python_semantic::analyze::visibility::is_staticmethod;
use ruff_python_trivia::{textwrap::dedent, PythonWhitespace}; use ruff_python_trivia::{textwrap::dedent, PythonWhitespace};
use ruff_source_file::NewlineWithTrailingNewline; use ruff_source_file::NewlineWithTrailingNewline;
@ -1640,7 +1640,7 @@ fn common_section(
if checker.patch(diagnostic.kind.rule()) { if checker.patch(diagnostic.kind.rule()) {
// Replace the existing indentation with whitespace of the appropriate length. // Replace the existing indentation with whitespace of the appropriate length.
let content = clean_space(docstring.indentation); let content = clean_space(docstring.indentation);
let fix_range = TextRange::at(context.range().start(), leading_space.text_len()); let fix_range = TextRange::at(context.start(), leading_space.text_len());
diagnostic.set_fix(Fix::automatic(if content.is_empty() { diagnostic.set_fix(Fix::automatic(if content.is_empty() {
Edit::range_deletion(fix_range) Edit::range_deletion(fix_range)
@ -1667,7 +1667,7 @@ fn common_section(
// Add a newline at the beginning of the next section. // Add a newline at the beginning of the next section.
diagnostic.set_fix(Fix::automatic(Edit::insertion( diagnostic.set_fix(Fix::automatic(Edit::insertion(
line_end.to_string(), line_end.to_string(),
next.range().start(), next.start(),
))); )));
} }
checker.diagnostics.push(diagnostic); checker.diagnostics.push(diagnostic);
@ -1684,7 +1684,7 @@ fn common_section(
// Add a newline after the section. // Add a newline after the section.
diagnostic.set_fix(Fix::automatic(Edit::insertion( diagnostic.set_fix(Fix::automatic(Edit::insertion(
format!("{}{}", line_end, docstring.indentation), format!("{}{}", line_end, docstring.indentation),
context.range().end(), context.end(),
))); )));
} }
checker.diagnostics.push(diagnostic); checker.diagnostics.push(diagnostic);
@ -1704,7 +1704,7 @@ fn common_section(
// Add a blank line before the section. // Add a blank line before the section.
diagnostic.set_fix(Fix::automatic(Edit::insertion( diagnostic.set_fix(Fix::automatic(Edit::insertion(
line_end.to_string(), line_end.to_string(),
context.range().start(), context.start(),
))); )));
} }
checker.diagnostics.push(diagnostic); checker.diagnostics.push(diagnostic);

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::docstrings::Docstring; use crate::docstrings::Docstring;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_codegen::Quote; use ruff_python_codegen::Quote;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -96,10 +96,8 @@ pub(crate) fn remove_exception_handler_assignment(
locator: &Locator, locator: &Locator,
) -> Result<Edit> { ) -> Result<Edit> {
// Lex backwards, to the token just before the `as`. // Lex backwards, to the token just before the `as`.
let mut tokenizer = SimpleTokenizer::up_to_without_back_comment( let mut tokenizer =
bound_exception.range.start(), SimpleTokenizer::up_to_without_back_comment(bound_exception.start(), locator.contents())
locator.contents(),
)
.skip_trivia(); .skip_trivia();
// Eat the `as` token. // Eat the `as` token.
@ -114,7 +112,7 @@ pub(crate) fn remove_exception_handler_assignment(
.context("expected the exception name to be preceded by a token")?; .context("expected the exception name to be preceded by a token")?;
// Lex forwards, to the `:` token. // Lex forwards, to the `:` token.
let following = SimpleTokenizer::starts_at(bound_exception.range.end(), locator.contents()) let following = SimpleTokenizer::starts_at(bound_exception.end(), locator.contents())
.skip_trivia() .skip_trivia()
.next() .next()
.context("expected the exception name to be followed by a colon")?; .context("expected the exception name to be followed by a colon")?;

View file

@ -2,6 +2,7 @@ use std::string::ToString;
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::{Scope, ScopeId}; use ruff_python_semantic::{Scope, ScopeId};
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::Scope; use ruff_python_semantic::Scope;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
@ -44,7 +45,7 @@ pub(crate) fn unused_annotation(
&& !binding.is_used() && !binding.is_used()
&& !checker.settings.dummy_variable_rgx.is_match(name) && !checker.settings.dummy_variable_rgx.is_match(name)
{ {
Some((name.to_string(), binding.range)) Some((name.to_string(), binding.range()))
} else { } else {
None None
} }

View file

@ -5,6 +5,7 @@ use rustc_hash::FxHashMap;
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation}; use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::{AnyImport, Exceptions, Imported, Scope, StatementId}; use ruff_python_semantic::{AnyImport, Exceptions, Imported, Scope, StatementId};
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
@ -124,11 +125,11 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
let import = ImportBinding { let import = ImportBinding {
import, import,
range: binding.range, range: binding.range(),
parent_range: binding.parent_range(checker.semantic()), parent_range: binding.parent_range(checker.semantic()),
}; };
if checker.rule_is_ignored(Rule::UnusedImport, import.range.start()) if checker.rule_is_ignored(Rule::UnusedImport, import.start())
|| import.parent_range.is_some_and(|parent_range| { || import.parent_range.is_some_and(|parent_range| {
checker.rule_is_ignored(Rule::UnusedImport, parent_range.start()) checker.rule_is_ignored(Rule::UnusedImport, parent_range.start())
}) })
@ -226,6 +227,12 @@ struct ImportBinding<'a> {
parent_range: Option<TextRange>, parent_range: Option<TextRange>,
} }
impl Ranged for ImportBinding<'_> {
fn range(&self) -> TextRange {
self.range
}
}
/// Generate a [`Fix`] to remove unused imports from a statement. /// Generate a [`Fix`] to remove unused imports from a statement.
fn fix_imports( fn fix_imports(
checker: &Checker, checker: &Checker,

View file

@ -320,7 +320,7 @@ pub(crate) fn unused_variable(checker: &Checker, scope: &Scope, diagnostics: &mu
| "__debuggerskip__" | "__debuggerskip__"
) )
{ {
return Some((name.to_string(), binding.range, binding.source)); return Some((name.to_string(), binding.range(), binding.source));
} }
None None

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::Binding; use ruff_python_semantic::Binding;
/// ## What it does /// ## What it does
@ -37,7 +38,7 @@ impl Violation for InvalidAllFormat {
/// PLE0605 /// PLE0605
pub(crate) fn invalid_all_format(binding: &Binding) -> Option<Diagnostic> { pub(crate) fn invalid_all_format(binding: &Binding) -> Option<Diagnostic> {
if binding.is_invalid_all_format() { if binding.is_invalid_all_format() {
Some(Diagnostic::new(InvalidAllFormat, binding.range)) Some(Diagnostic::new(InvalidAllFormat, binding.range()))
} else { } else {
None None
} }

View file

@ -1,5 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation}; use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Ranged;
use ruff_python_semantic::Binding; use ruff_python_semantic::Binding;
/// ## What it does /// ## What it does
@ -37,7 +38,7 @@ impl Violation for InvalidAllObject {
/// PLE0604 /// PLE0604
pub(crate) fn invalid_all_object(binding: &Binding) -> Option<Diagnostic> { pub(crate) fn invalid_all_object(binding: &Binding) -> Option<Diagnostic> {
if binding.is_invalid_all_object() { if binding.is_invalid_all_object() {
Some(Diagnostic::new(InvalidAllObject, binding.range)) Some(Diagnostic::new(InvalidAllObject, binding.range()))
} else { } else {
None None
} }

View file

@ -201,10 +201,10 @@ fn fix_py2_block(checker: &Checker, stmt_if: &StmtIf, branch: &IfElifBranch) ->
.elif_else_clauses .elif_else_clauses
.iter() .iter()
.map(Ranged::start) .map(Ranged::start)
.find(|start| *start > branch.range.start()); .find(|start| *start > branch.start());
Some(Fix::suggested(Edit::deletion( Some(Fix::suggested(Edit::deletion(
branch.range.start(), branch.start(),
next_start.unwrap_or(branch.range.end()), next_start.unwrap_or(branch.end()),
))) )))
} }
} }
@ -256,7 +256,7 @@ fn fix_py3_block(checker: &mut Checker, stmt_if: &StmtIf, branch: &IfElifBranch)
.slice(TextRange::new(branch.test.end(), end.end())); .slice(TextRange::new(branch.test.end(), end.end()));
Some(Fix::suggested(Edit::range_replacement( Some(Fix::suggested(Edit::range_replacement(
format!("else{text}"), format!("else{text}"),
TextRange::new(branch.range.start(), stmt_if.end()), TextRange::new(branch.start(), stmt_if.end()),
))) )))
} }
} }

View file

@ -24,6 +24,12 @@ pub struct IfElifBranch<'a> {
pub range: TextRange, pub range: TextRange,
} }
impl Ranged for IfElifBranch<'_> {
fn range(&self) -> TextRange {
self.range
}
}
pub fn if_elif_branches(stmt_if: &StmtIf) -> impl Iterator<Item = IfElifBranch> { pub fn if_elif_branches(stmt_if: &StmtIf) -> impl Iterator<Item = IfElifBranch> {
iter::once(IfElifBranch { iter::once(IfElifBranch {
kind: BranchKind::If, kind: BranchKind::If,

View file

@ -327,7 +327,7 @@ impl Format<PyFormatContext<'_>> for FormatComment<'_> {
} }
let start = slice.start() + start_offset; let start = slice.start() + start_offset;
let end = slice.range().end() - trailing_whitespace_len; let end = slice.end() - trailing_whitespace_len;
write!( write!(
f, f,

View file

@ -445,7 +445,7 @@ fn handle_own_line_comment_between_branches<'a>(
// It depends on the indentation level of the comment if it is a leading comment for the // It depends on the indentation level of the comment if it is a leading comment for the
// following branch or if it a trailing comment of the previous body's last statement. // following branch or if it a trailing comment of the previous body's last statement.
let comment_indentation = indentation_at_offset(comment.slice().range().start(), locator) let comment_indentation = indentation_at_offset(comment.slice().start(), locator)
.unwrap_or_default() .unwrap_or_default()
.len(); .len();
@ -529,7 +529,7 @@ fn handle_own_line_comment_after_branch<'a>(
// We only care about the length because indentations with mixed spaces and tabs are only valid if // We only care about the length because indentations with mixed spaces and tabs are only valid if
// the indent-level doesn't depend on the tab width (the indent level must be the same if the tab width is 1 or 8). // the indent-level doesn't depend on the tab width (the indent level must be the same if the tab width is 1 or 8).
let comment_indentation = indentation_at_offset(comment.slice().range().start(), locator) let comment_indentation = indentation_at_offset(comment.slice().start(), locator)
.unwrap_or_default() .unwrap_or_default()
.len(); .len();
@ -1314,7 +1314,7 @@ fn handle_comprehension_comment<'a>(
// b in c // b in c
// ] // ]
// ``` // ```
if comment.slice().end() < comprehension.target.range().start() { if comment.slice().end() < comprehension.target.start() {
return if is_own_line { return if is_own_line {
// own line comments are correctly assigned as leading the target // own line comments are correctly assigned as leading the target
CommentPlacement::Default(comment) CommentPlacement::Default(comment)
@ -1325,10 +1325,7 @@ fn handle_comprehension_comment<'a>(
} }
let in_token = find_only_token_in_range( let in_token = find_only_token_in_range(
TextRange::new( TextRange::new(comprehension.target.end(), comprehension.iter.start()),
comprehension.target.range().end(),
comprehension.iter.range().start(),
),
SimpleTokenKind::In, SimpleTokenKind::In,
locator, locator,
); );
@ -1361,7 +1358,7 @@ fn handle_comprehension_comment<'a>(
// c // c
// ] // ]
// ``` // ```
if comment.slice().start() < comprehension.iter.range().start() { if comment.slice().start() < comprehension.iter.start() {
return if is_own_line { return if is_own_line {
CommentPlacement::Default(comment) CommentPlacement::Default(comment)
} else { } else {
@ -1370,7 +1367,7 @@ fn handle_comprehension_comment<'a>(
}; };
} }
let mut last_end = comprehension.iter.range().end(); let mut last_end = comprehension.iter.end();
for if_node in &comprehension.ifs { for if_node in &comprehension.ifs {
// ```python // ```python
@ -1391,7 +1388,7 @@ fn handle_comprehension_comment<'a>(
// ] // ]
// ``` // ```
let if_token = find_only_token_in_range( let if_token = find_only_token_in_range(
TextRange::new(last_end, if_node.range().start()), TextRange::new(last_end, if_node.start()),
SimpleTokenKind::If, SimpleTokenKind::If,
locator, locator,
); );
@ -1400,11 +1397,11 @@ fn handle_comprehension_comment<'a>(
return CommentPlacement::dangling(if_node, comment); return CommentPlacement::dangling(if_node, comment);
} }
} else if if_token.start() < comment.slice().start() } else if if_token.start() < comment.slice().start()
&& comment.slice().start() < if_node.range().start() && comment.slice().start() < if_node.start()
{ {
return CommentPlacement::dangling(if_node, comment); return CommentPlacement::dangling(if_node, comment);
} }
last_end = if_node.range().end(); last_end = if_node.end();
} }
CommentPlacement::Default(comment) CommentPlacement::Default(comment)

View file

@ -1,4 +1,4 @@
use ruff_python_ast::{Expr, ExprSlice, ExprUnaryOp, Ranged, UnaryOp}; use ruff_python_ast::{Expr, ExprSlice, ExprUnaryOp, UnaryOp};
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
use ruff_formatter::prelude::{hard_line_break, line_suffix_boundary, space, text}; use ruff_formatter::prelude::{hard_line_break, line_suffix_boundary, space, text};
@ -20,13 +20,18 @@ impl FormatNodeRule<ExprSlice> for FormatExprSlice {
fn fmt_fields(&self, item: &ExprSlice, f: &mut PyFormatter) -> FormatResult<()> { fn fmt_fields(&self, item: &ExprSlice, f: &mut PyFormatter) -> FormatResult<()> {
// `[lower:upper:step]` // `[lower:upper:step]`
let ExprSlice { let ExprSlice {
range,
lower, lower,
upper, upper,
step, step,
range,
} = item; } = item;
let (first_colon, second_colon) = find_colons(f.context().source(), *range, lower, upper)?; let (first_colon, second_colon) = find_colons(
f.context().source(),
*range,
lower.as_deref(),
upper.as_deref(),
)?;
// Handle comment placement // Handle comment placement
// In placements.rs, we marked comment for None nodes a dangling and associated all others // In placements.rs, we marked comment for None nodes a dangling and associated all others
@ -36,14 +41,14 @@ impl FormatNodeRule<ExprSlice> for FormatExprSlice {
let comments = f.context().comments().clone(); let comments = f.context().comments().clone();
let slice_dangling_comments = comments.dangling_comments(item.as_any_node_ref()); let slice_dangling_comments = comments.dangling_comments(item.as_any_node_ref());
// Put the dangling comments (where the nodes are missing) into buckets // Put the dangling comments (where the nodes are missing) into buckets
let first_colon_partition_index = slice_dangling_comments let first_colon_partition_index =
.partition_point(|x| x.slice().start() < first_colon.range.start()); slice_dangling_comments.partition_point(|x| x.slice().start() < first_colon.start());
let (dangling_lower_comments, dangling_upper_step_comments) = let (dangling_lower_comments, dangling_upper_step_comments) =
slice_dangling_comments.split_at(first_colon_partition_index); slice_dangling_comments.split_at(first_colon_partition_index);
let (dangling_upper_comments, dangling_step_comments) = let (dangling_upper_comments, dangling_step_comments) =
if let Some(second_colon) = &second_colon { if let Some(second_colon) = &second_colon {
let second_colon_partition_index = dangling_upper_step_comments let second_colon_partition_index = dangling_upper_step_comments
.partition_point(|x| x.slice().start() < second_colon.range.start()); .partition_point(|x| x.slice().start() < second_colon.start());
dangling_upper_step_comments.split_at(second_colon_partition_index) dangling_upper_step_comments.split_at(second_colon_partition_index)
} else { } else {
// Without a second colon they remaining dangling comments belong between the first // Without a second colon they remaining dangling comments belong between the first
@ -153,27 +158,27 @@ impl FormatNodeRule<ExprSlice> for FormatExprSlice {
pub(crate) fn find_colons( pub(crate) fn find_colons(
contents: &str, contents: &str,
range: TextRange, range: TextRange,
lower: &Option<Box<Expr>>, lower: Option<&Expr>,
upper: &Option<Box<Expr>>, upper: Option<&Expr>,
) -> FormatResult<(SimpleToken, Option<SimpleToken>)> { ) -> FormatResult<(SimpleToken, Option<SimpleToken>)> {
let after_lower = lower let after_lower = lower
.as_ref() .as_ref()
.map_or(range.start(), |lower| lower.range().end()); .map_or(range.start(), ruff_python_ast::Ranged::end);
let mut tokens = SimpleTokenizer::new(contents, TextRange::new(after_lower, range.end())) let mut tokens = SimpleTokenizer::new(contents, TextRange::new(after_lower, range.end()))
.skip_trivia() .skip_trivia()
.skip_while(|token| token.kind == SimpleTokenKind::RParen); .skip_while(|token| token.kind == SimpleTokenKind::RParen);
let first_colon = tokens.next().ok_or(FormatError::syntax_error( let first_colon = tokens.next().ok_or(FormatError::syntax_error(
"Din't find any token for slice first colon", "Didn't find any token for slice first colon",
))?; ))?;
if first_colon.kind != SimpleTokenKind::Colon { if first_colon.kind != SimpleTokenKind::Colon {
return Err(FormatError::syntax_error( return Err(FormatError::syntax_error(
"slice first colon token was not a colon", "Slice first colon token was not a colon",
)); ));
} }
let after_upper = upper let after_upper = upper
.as_ref() .as_ref()
.map_or(first_colon.end(), |upper| upper.range().end()); .map_or(first_colon.end(), ruff_python_ast::Ranged::end);
let mut tokens = SimpleTokenizer::new(contents, TextRange::new(after_upper, range.end())) let mut tokens = SimpleTokenizer::new(contents, TextRange::new(after_upper, range.end()))
.skip_trivia() .skip_trivia()
.skip_while(|token| token.kind == SimpleTokenKind::RParen); .skip_while(|token| token.kind == SimpleTokenKind::RParen);
@ -206,6 +211,7 @@ fn is_simple_expr(expr: &Expr) -> bool {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ExprSliceCommentSection { pub(crate) enum ExprSliceCommentSection {
Lower, Lower,
Upper, Upper,
@ -229,21 +235,22 @@ pub(crate) fn assign_comment_in_slice(
expr_slice: &ExprSlice, expr_slice: &ExprSlice,
) -> ExprSliceCommentSection { ) -> ExprSliceCommentSection {
let ExprSlice { let ExprSlice {
range,
lower, lower,
upper, upper,
step: _, step: _,
range,
} = expr_slice; } = expr_slice;
let (first_colon, second_colon) = find_colons(contents, *range, lower, upper) let (first_colon, second_colon) =
find_colons(contents, *range, lower.as_deref(), upper.as_deref())
.expect("SyntaxError when trying to parse slice"); .expect("SyntaxError when trying to parse slice");
if comment.start() < first_colon.range.start() { if comment.start() < first_colon.start() {
ExprSliceCommentSection::Lower ExprSliceCommentSection::Lower
} else { } else {
// We are to the right of the first colon // We are to the right of the first colon
if let Some(second_colon) = second_colon { if let Some(second_colon) = second_colon {
if comment.start() < second_colon.range.start() { if comment.start() < second_colon.start() {
ExprSliceCommentSection::Upper ExprSliceCommentSection::Upper
} else { } else {
ExprSliceCommentSection::Step ExprSliceCommentSection::Step

View file

@ -91,7 +91,7 @@ fn is_operand_parenthesized(unary: &ExprUnaryOp, source: &str) -> bool {
UnaryOp::USub => '-'.text_len(), UnaryOp::USub => '-'.text_len(),
}; };
let trivia_range = TextRange::new(unary.range.start() + operator_len, unary.operand.start()); let trivia_range = TextRange::new(unary.start() + operator_len, unary.operand.start());
if let Some(token) = SimpleTokenizer::new(source, trivia_range) if let Some(token) = SimpleTokenizer::new(source, trivia_range)
.skip_trivia() .skip_trivia()

View file

@ -269,6 +269,12 @@ struct FormatStringPart {
is_raw_string: bool, is_raw_string: bool,
} }
impl Ranged for FormatStringPart {
fn range(&self) -> TextRange {
self.range
}
}
impl FormatStringPart { impl FormatStringPart {
fn new(range: TextRange, quoting: Quoting, locator: &Locator, quote_style: QuoteStyle) -> Self { fn new(range: TextRange, quoting: Quoting, locator: &Locator, quote_style: QuoteStyle) -> Self {
let string_content = locator.slice(range); let string_content = locator.slice(range);
@ -317,10 +323,10 @@ impl Format<PyFormatContext<'_>> for FormatStringPart {
write!(f, [self.prefix, self.preferred_quotes])?; write!(f, [self.prefix, self.preferred_quotes])?;
match normalized { match normalized {
Cow::Borrowed(_) => { Cow::Borrowed(_) => {
source_text_slice(self.range, contains_newlines).fmt(f)?; source_text_slice(self.range(), contains_newlines).fmt(f)?;
} }
Cow::Owned(normalized) => { Cow::Owned(normalized) => {
dynamic_text(&normalized, Some(self.range.start())).fmt(f)?; dynamic_text(&normalized, Some(self.start())).fmt(f)?;
} }
} }
self.preferred_quotes.fmt(f) self.preferred_quotes.fmt(f)
@ -781,12 +787,12 @@ fn format_docstring(string_part: &FormatStringPart, f: &mut PyFormatter) -> Form
let locator = f.context().locator(); let locator = f.context().locator();
// Black doesn't change the indentation of docstrings that contain an escaped newline // Black doesn't change the indentation of docstrings that contain an escaped newline
if locator.slice(string_part.range).contains("\\\n") { if locator.slice(string_part.range()).contains("\\\n") {
return string_part.fmt(f); return string_part.fmt(f);
} }
let (normalized, _) = normalize_string( let (normalized, _) = normalize_string(
locator.slice(string_part.range), locator.slice(string_part.range()),
string_part.preferred_quotes, string_part.preferred_quotes,
string_part.is_raw_string, string_part.is_raw_string,
); );
@ -799,13 +805,13 @@ fn format_docstring(string_part: &FormatStringPart, f: &mut PyFormatter) -> Form
write!( write!(
f, f,
[ [
source_position(string_part.range.start()), source_position(string_part.start()),
string_part.prefix, string_part.prefix,
string_part.preferred_quotes string_part.preferred_quotes
] ]
)?; )?;
// We track where in the source docstring we are (in source code byte offsets) // We track where in the source docstring we are (in source code byte offsets)
let mut offset = string_part.range.start(); let mut offset = string_part.start();
// The first line directly after the opening quotes has different rules than the rest, mainly // The first line directly after the opening quotes has different rules than the rest, mainly
// that we remove all leading whitespace as there's no indentation // that we remove all leading whitespace as there's no indentation
@ -892,7 +898,7 @@ fn format_docstring(string_part: &FormatStringPart, f: &mut PyFormatter) -> Form
f, f,
[ [
string_part.preferred_quotes, string_part.preferred_quotes,
source_position(string_part.range.end()) source_position(string_part.end())
] ]
) )
} }

View file

@ -39,7 +39,7 @@ impl FormatNodeRule<Comprehension> for FormatComprehension {
let dangling_item_comments = comments.dangling_comments(item); let dangling_item_comments = comments.dangling_comments(item);
let (before_target_comments, before_in_comments) = dangling_item_comments.split_at( let (before_target_comments, before_in_comments) = dangling_item_comments.split_at(
dangling_item_comments dangling_item_comments
.partition_point(|comment| comment.slice().end() < target.range().start()), .partition_point(|comment| comment.slice().end() < target.start()),
); );
let trailing_in_comments = comments.dangling_comments(iter); let trailing_in_comments = comments.dangling_comments(iter);

View file

@ -79,7 +79,7 @@ fn is_match_case_pattern_parenthesized(
) -> FormatResult<bool> { ) -> FormatResult<bool> {
let mut tokenizer = SimpleTokenizer::new( let mut tokenizer = SimpleTokenizer::new(
context.source(), context.source(),
TextRange::new(case.range().start(), pattern.range().start()), TextRange::new(case.start(), pattern.start()),
) )
.skip_trivia(); .skip_trivia();

View file

@ -416,7 +416,7 @@ pub(crate) fn find_parameter_separators(
debug_assert!(star.kind() == SimpleTokenKind::Star, "{star:?}"); debug_assert!(star.kind() == SimpleTokenKind::Star, "{star:?}");
Some(ParameterSeparator { Some(ParameterSeparator {
preceding_end: parameters.range.start(), preceding_end: parameters.start(),
separator: star.range, separator: star.range,
following_start: first_keyword_argument.start(), following_start: first_keyword_argument.start(),
}) })

View file

@ -6,7 +6,7 @@ use ruff_formatter::write;
use ruff_formatter::FormatResult; use ruff_formatter::FormatResult;
use ruff_python_ast::node::AstNode; use ruff_python_ast::node::AstNode;
use ruff_python_ast::TypeParams; use ruff_python_ast::{Ranged, TypeParams};
#[derive(Default)] #[derive(Default)]
pub struct FormatTypeParams; pub struct FormatTypeParams;
@ -26,7 +26,7 @@ impl FormatNodeRule<TypeParams> for FormatTypeParams {
write!(f, [trailing_comments(dangling_comments)])?; write!(f, [trailing_comments(dangling_comments)])?;
let items = format_with(|f| { let items = format_with(|f| {
f.join_comma_separated(item.range.end()) f.join_comma_separated(item.end())
.nodes(item.type_params.iter()) .nodes(item.type_params.iter())
.finish() .finish()
}); });

View file

@ -66,7 +66,7 @@ pub(crate) fn validate_pos_params(
if let Some(invalid) = first_invalid { if let Some(invalid) = first_invalid {
return Err(LexicalError { return Err(LexicalError {
error: LexicalErrorType::DefaultArgumentError, error: LexicalErrorType::DefaultArgumentError,
location: invalid.parameter.range.start(), location: invalid.parameter.start(),
}); });
} }
Ok(()) Ok(())

View file

@ -318,9 +318,8 @@ impl<'a> StringParser<'a> {
) )
})?; })?;
let leading = let leading =
&expression[..usize::from(value.range().start() - start_location) - 1]; &expression[..usize::from(value.start() - start_location) - 1];
let trailing = let trailing = &expression[usize::from(value.end() - start_location) - 1..];
&expression[usize::from(value.range().end() - start_location) - 1..];
vec![Expr::from(ast::ExprFormattedValue { vec![Expr::from(ast::ExprFormattedValue {
value: Box::new(value), value: Box::new(value),
debug_text: Some(ast::DebugText { debug_text: Some(ast::DebugText {

View file

@ -283,6 +283,12 @@ bitflags! {
} }
} }
impl Ranged for Binding<'_> {
fn range(&self) -> TextRange {
self.range
}
}
/// ID uniquely identifying a [Binding] in a program. /// ID uniquely identifying a [Binding] in a program.
/// ///
/// Using a `u32` to identify [Binding]s should is sufficient because Ruff only supports documents with a /// Using a `u32` to identify [Binding]s should is sufficient because Ruff only supports documents with a

View file

@ -3,6 +3,7 @@ use ruff_text_size::TextRange;
use std::ops::Deref; use std::ops::Deref;
use ruff_index::{newtype_index, IndexSlice, IndexVec}; use ruff_index::{newtype_index, IndexSlice, IndexVec};
use ruff_python_ast::Ranged;
use ruff_source_file::Locator; use ruff_source_file::Locator;
use crate::context::ExecutionContext; use crate::context::ExecutionContext;
@ -26,11 +27,6 @@ impl ResolvedReference {
self.scope_id self.scope_id
} }
/// The range of the reference in the source code.
pub const fn range(&self) -> TextRange {
self.range
}
/// The [`ExecutionContext`] of the reference. /// The [`ExecutionContext`] of the reference.
pub const fn context(&self) -> ExecutionContext { pub const fn context(&self) -> ExecutionContext {
if self.flags.intersects(SemanticModelFlags::TYPING_CONTEXT) { if self.flags.intersects(SemanticModelFlags::TYPING_CONTEXT) {
@ -41,6 +37,13 @@ impl ResolvedReference {
} }
} }
impl Ranged for ResolvedReference {
/// The range of the reference in the source code.
fn range(&self) -> TextRange {
self.range
}
}
/// Id uniquely identifying a read reference in a program. /// Id uniquely identifying a read reference in a program.
#[newtype_index] #[newtype_index]
pub struct ResolvedReferenceId; pub struct ResolvedReferenceId;

View file

@ -87,10 +87,8 @@ impl Strategy for StrategyRemoveModuleMember {
) -> Result<Box<dyn ExactSizeStringIter + 'a>> { ) -> Result<Box<dyn ExactSizeStringIter + 'a>> {
let iter = ast.iter().map(|stmt| { let iter = ast.iter().map(|stmt| {
// trim the newlines the range misses // trim the newlines the range misses
input[..stmt.range().start().to_usize()] input[..stmt.start().to_usize()].trim_end().to_string()
.trim_end() + input[stmt.end().to_usize()..].trim_start()
.to_string()
+ input[stmt.range().end().to_usize()..].trim_start()
}); });
Ok(Box::new(iter)) Ok(Box::new(iter))
} }