Pyupgrade: Turn errors into OSError (#1434)

Colin Delahunty 2022-12-31 21:36:05 +00:00 committed by GitHub
parent 0e8c237167
commit 4ad8db3d61
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
43 changed files with 1196 additions and 274 deletions

1
.gitignore vendored
View file

@@ -181,3 +181,4 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 .idea/
+.vimspector.json

View file

@@ -682,6 +682,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
 | UP021 | ReplaceUniversalNewlines | `universal_newlines` is deprecated, use `text` | 🛠 |
 | UP022 | ReplaceStdoutStderr | Sending stdout and stderr to pipe is deprecated, use `capture_output` | 🛠 |
 | UP023 | RewriteCElementTree | `cElementTree` is deprecated, use `ElementTree` | 🛠 |
+| UP024 | OSErrorAlias | Replace aliased errors with `OSError` | 🛠 |
 | UP025 | RewriteUnicodeLiteral | Remove unicode literals from strings | 🛠 |
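
As an illustration of the new rule, a minimal before/after sketch based on the UP024 fixtures added in this commit (not taken from the project docs): with autofix enabled, the deprecated aliases collapse to the single builtin `OSError`.

```python
# Before: deprecated aliases of OSError (flagged by UP024).
try:
    pass
except (EnvironmentError, IOError):
    pass

# After autofix: both aliases map to OSError, so the tuple collapses to a bare name.
try:
    pass
except OSError:
    pass
```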
### pep8-naming (N)

View file

@@ -0,0 +1,98 @@
import mmap, select, socket
from mmap import error
# These should be fixed
try:
pass
except EnvironmentError:
pass
try:
pass
except IOError:
pass
try:
pass
except WindowsError:
pass
try:
pass
except mmap.error:
pass
try:
pass
except select.error:
pass
try:
pass
except socket.error:
pass
try:
pass
except error:
pass
# Should NOT be in parentheses when replaced
try:
pass
except (IOError,):
pass
try:
pass
except (mmap.error,):
pass
try:
pass
except (EnvironmentError, IOError, OSError, select.error):
pass
# Should be kept in parentheses (because multiple)
try:
pass
except (IOError, KeyError, OSError):
pass
# First should change, second should not
from .mmap import error
try:
pass
except (IOError, error):
pass
# These should not change
from foo import error
try:
pass
except (OSError, error):
pass
try:
pass
except:
pass
try:
pass
except AssertionError:
pass
try:
pass
except (mmap).error:
pass
try:
pass
except OSError:
pass
try:
pass
except (OSError, KeyError):
pass

View file

@@ -0,0 +1,17 @@
import mmap, socket, select
try:
pass
except (OSError, mmap.error, IOError):
pass
except (OSError, socket.error, KeyError):
pass
try:
pass
except (
OSError,
select.error,
IOError,
):
pass

View file

@@ -0,0 +1,50 @@
# These should not change
raise ValueError
raise ValueError(1)
from .mmap import error
raise error
# Testing the modules
import socket, mmap, select
raise socket.error
raise mmap.error
raise select.error
raise socket.error()
raise mmap.error(1)
raise select.error(1, 2)
raise socket.error(
1,
2,
3,
)
from mmap import error
raise error
from socket import error
raise error(1)
from select import error
raise error(1, 2)
# Testing the names
raise EnvironmentError
raise IOError
raise WindowsError
raise EnvironmentError()
raise IOError(1)
raise WindowsError(1, 2)
raise EnvironmentError(
1,
2,
3,
)
raise WindowsError
raise EnvironmentError(1)
raise IOError(1, 2)

View file

@@ -909,6 +909,7 @@
 "UP021",
 "UP022",
 "UP023",
+"UP024",
 "UP025",
 "W",
 "W2",

View file

@@ -335,20 +335,17 @@ pub fn to_absolute(relative: Location, base: Location) -> Location {
 /// Return `true` if a `Stmt` has leading content.
 pub fn match_leading_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
-    let range = Range {
-        location: Location::new(stmt.location.row(), 0),
-        end_location: stmt.location,
-    };
+    let range = Range::new(Location::new(stmt.location.row(), 0), stmt.location);
     let prefix = locator.slice_source_code_range(&range);
     prefix.chars().any(|char| !char.is_whitespace())
 }
 /// Return `true` if a `Stmt` has trailing content.
 pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
-    let range = Range {
-        location: stmt.end_location.unwrap(),
-        end_location: Location::new(stmt.end_location.unwrap().row() + 1, 0),
-    };
+    let range = Range::new(
+        stmt.end_location.unwrap(),
+        Location::new(stmt.end_location.unwrap().row() + 1, 0),
+    );
     let suffix = locator.slice_source_code_range(&range);
     for char in suffix.chars() {
         if char == '#' {
@@ -384,10 +381,7 @@ pub fn identifier_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Range {
     let contents = locator.slice_source_code_range(&Range::from_located(stmt));
     for (start, tok, end) in lexer::make_tokenizer_located(&contents, stmt.location).flatten() {
         if matches!(tok, Tok::Name { .. }) {
-            return Range {
-                location: start,
-                end_location: end,
-            };
+            return Range::new(start, end);
         }
     }
     error!("Failed to find identifier for {:?}", stmt);
@@ -419,20 +413,15 @@ pub fn excepthandler_name_range(
     match (name, type_) {
         (Some(_), Some(type_)) => {
             let type_end_location = type_.end_location.unwrap();
-            let contents = locator.slice_source_code_range(&Range {
-                location: type_end_location,
-                end_location: body[0].location,
-            });
+            let contents =
+                locator.slice_source_code_range(&Range::new(type_end_location, body[0].location));
             let range = lexer::make_tokenizer_located(&contents, type_end_location)
                 .flatten()
                 .tuple_windows()
                 .find(|(tok, next_tok)| {
                     matches!(tok.1, Tok::As) && matches!(next_tok.1, Tok::Name { .. })
                 })
-                .map(|((..), (location, _, end_location))| Range {
-                    location,
-                    end_location,
-                });
+                .map(|((..), (location, _, end_location))| Range::new(location, end_location));
             range
         }
         _ => None,
@@ -506,10 +495,10 @@ pub fn preceded_by_continuation(stmt: &Stmt, locator: &SourceCodeLocator) -> boo
     // make conservative choices.
     // TODO(charlie): Come up with a more robust strategy.
     if stmt.location.row() > 1 {
-        let range = Range {
-            location: Location::new(stmt.location.row() - 1, 0),
-            end_location: Location::new(stmt.location.row(), 0),
-        };
+        let range = Range::new(
+            Location::new(stmt.location.row() - 1, 0),
+            Location::new(stmt.location.row(), 0),
+        );
         let line = locator.slice_source_code_range(&range);
         if line.trim().ends_with('\\') {
             return true;
@@ -733,10 +722,7 @@ y = 2
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(1, 4),
-            end_location: Location::new(1, 5),
-        }
+        Range::new(Location::new(1, 4), Location::new(1, 5),)
     );
     let contents = r#"
@@ -750,10 +736,7 @@ def \
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(2, 2),
-            end_location: Location::new(2, 3),
-        }
+        Range::new(Location::new(2, 2), Location::new(2, 3),)
     );
     let contents = "class Class(): pass".trim();
@@ -762,10 +745,7 @@ def \
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(1, 6),
-            end_location: Location::new(1, 11),
-        }
+        Range::new(Location::new(1, 6), Location::new(1, 11),)
     );
     let contents = "class Class: pass".trim();
@@ -774,10 +754,7 @@ def \
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(1, 6),
-            end_location: Location::new(1, 11),
-        }
+        Range::new(Location::new(1, 6), Location::new(1, 11),)
     );
     let contents = r#"
@@ -791,10 +768,7 @@ class Class():
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(2, 6),
-            end_location: Location::new(2, 11),
-        }
+        Range::new(Location::new(2, 6), Location::new(2, 11),)
     );
     let contents = r#"x = y + 1"#.trim();
@@ -803,10 +777,7 @@ class Class():
     let locator = SourceCodeLocator::new(contents);
     assert_eq!(
         identifier_range(stmt, &locator),
-        Range {
-            location: Location::new(1, 0),
-            end_location: Location::new(1, 9),
-        }
+        Range::new(Location::new(1, 0), Location::new(1, 9),)
     );
     Ok(())

View file

@@ -22,12 +22,16 @@ pub struct Range {
 }
 impl Range {
-    pub fn from_located<T>(located: &Located<T>) -> Self {
-        Range {
-            location: located.location,
-            end_location: located.end_location.unwrap(),
+    pub fn new(location: Location, end_location: Location) -> Self {
+        Self {
+            location,
+            end_location,
         }
     }
+    pub fn from_located<T>(located: &Located<T>) -> Self {
+        Range::new(located.location, located.end_location.unwrap())
+    }
 }
 #[derive(Debug)]
View file

@@ -9,10 +9,10 @@ use crate::checkers::ast::Checker;
 /// Extract the leading indentation from a line.
 pub fn indentation<'a, T>(checker: &'a Checker, located: &'a Located<T>) -> Cow<'a, str> {
     let range = Range::from_located(located);
-    checker.locator.slice_source_code_range(&Range {
-        location: Location::new(range.location.row(), 0),
-        end_location: Location::new(range.location.row(), range.location.column()),
-    })
+    checker.locator.slice_source_code_range(&Range::new(
+        Location::new(range.location.row(), 0),
+        Location::new(range.location.row(), range.location.column()),
+    ))
 }
 /// Extract the leading words from a line of text.
View file

@@ -68,10 +68,7 @@ fn apply_fixes<'a>(
         }
         // Add all contents from `last_pos` to `fix.location`.
-        let slice = locator.slice_source_code_range(&Range {
-            location: last_pos,
-            end_location: fix.location,
-        });
+        let slice = locator.slice_source_code_range(&Range::new(last_pos, fix.location));
         output.append(&slice);
         // Add the patch itself.
View file

@@ -215,10 +215,10 @@ impl<'a> Checker<'a> {
             return false;
         }
         let noqa_lineno = self.noqa_line_for.get(&lineno).unwrap_or(&lineno);
-        let line = self.locator.slice_source_code_range(&Range {
-            location: Location::new(*noqa_lineno, 0),
-            end_location: Location::new(noqa_lineno + 1, 0),
-        });
+        let line = self.locator.slice_source_code_range(&Range::new(
+            Location::new(*noqa_lineno, 0),
+            Location::new(noqa_lineno + 1, 0),
+        ));
         match noqa::extract_noqa_directive(&line) {
             Directive::None => false,
             Directive::All(..) => true,
@@ -1107,6 +1107,11 @@ where
                         flake8_errmsg::plugins::string_in_exception(self, exc);
                     }
                 }
+                if self.settings.enabled.contains(&CheckCode::UP024) {
+                    if let Some(item) = exc {
+                        pyupgrade::plugins::os_error_alias(self, item);
+                    }
+                }
             }
             StmtKind::AugAssign { target, .. } => {
                 self.handle_node_load(target);
@@ -1191,6 +1196,9 @@ where
                 if self.settings.enabled.contains(&CheckCode::B013) {
                     flake8_bugbear::plugins::redundant_tuple_in_exception_handler(self, handlers);
                 }
+                if self.settings.enabled.contains(&CheckCode::UP024) {
+                    pyupgrade::plugins::os_error_alias(self, handlers);
+                }
             }
             StmtKind::Assign { targets, value, .. } => {
                 if self.settings.enabled.contains(&CheckCode::E731) {
@@ -1716,6 +1724,9 @@ where
                 if self.settings.enabled.contains(&CheckCode::UP022) {
                     pyupgrade::plugins::replace_stdout_stderr(self, expr, keywords);
                 }
+                if self.settings.enabled.contains(&CheckCode::UP024) {
+                    pyupgrade::plugins::os_error_alias(self, expr);
+                }
                 // flake8-print
                 if self.settings.enabled.contains(&CheckCode::T201)
@@ -3869,10 +3880,10 @@ impl<'a> Checker<'a> {
         let content = self
             .locator
             .slice_source_code_range(&Range::from_located(expr));
-        let indentation = self.locator.slice_source_code_range(&Range {
-            location: Location::new(expr.location.row(), 0),
-            end_location: Location::new(expr.location.row(), expr.location.column()),
-        });
+        let indentation = self.locator.slice_source_code_range(&Range::new(
+            Location::new(expr.location.row(), 0),
+            Location::new(expr.location.row(), expr.location.column()),
+        ));
         let body = pydocstyle::helpers::raw_contents(&content);
         let docstring = Docstring {
             kind: definition.kind,

View file

@@ -102,10 +102,7 @@ pub fn check_noqa(
             if matches.is_empty() {
                 let mut check = Check::new(
                     CheckKind::UnusedNOQA(None),
-                    Range {
-                        location: Location::new(row + 1, start),
-                        end_location: Location::new(row + 1, end),
-                    },
+                    Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
                 );
                 if matches!(autofix, flags::Autofix::Enabled)
                     && settings.fixable.contains(check.kind.code())
@@ -169,10 +166,7 @@ pub fn check_noqa(
                             .map(|code| (*code).to_string())
                             .collect(),
                     })),
-                    Range {
-                        location: Location::new(row + 1, start),
-                        end_location: Location::new(row + 1, end),
-                    },
+                    Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
                 );
                 if matches!(autofix, flags::Autofix::Enabled)
                     && settings.fixable.contains(check.kind.code())

View file

@@ -236,6 +236,7 @@ pub enum CheckCode {
     UP021,
     UP022,
     UP023,
+    UP024,
     UP025,
     // pydocstyle
     D100,
@@ -907,6 +908,7 @@ pub enum CheckKind {
     ReplaceUniversalNewlines,
     ReplaceStdoutStderr,
     RewriteCElementTree,
+    OSErrorAlias(Option<String>),
     RewriteUnicodeLiteral,
     // pydocstyle
     BlankLineAfterLastSection(String),
@@ -1301,6 +1303,7 @@ impl CheckCode {
            CheckCode::UP021 => CheckKind::ReplaceUniversalNewlines,
            CheckCode::UP022 => CheckKind::ReplaceStdoutStderr,
            CheckCode::UP023 => CheckKind::RewriteCElementTree,
+           CheckCode::UP024 => CheckKind::OSErrorAlias(None),
            CheckCode::UP025 => CheckKind::RewriteUnicodeLiteral,
            // pydocstyle
            CheckCode::D100 => CheckKind::PublicModule,
@@ -1733,6 +1736,7 @@ impl CheckCode {
            CheckCode::UP021 => CheckCategory::Pyupgrade,
            CheckCode::UP022 => CheckCategory::Pyupgrade,
            CheckCode::UP023 => CheckCategory::Pyupgrade,
+           CheckCode::UP024 => CheckCategory::Pyupgrade,
            CheckCode::UP025 => CheckCategory::Pyupgrade,
            CheckCode::W292 => CheckCategory::Pycodestyle,
            CheckCode::W605 => CheckCategory::Pycodestyle,
@@ -1955,6 +1959,7 @@ impl CheckKind {
            CheckKind::ReplaceUniversalNewlines => &CheckCode::UP021,
            CheckKind::ReplaceStdoutStderr => &CheckCode::UP022,
            CheckKind::RewriteCElementTree => &CheckCode::UP023,
+           CheckKind::OSErrorAlias(..) => &CheckCode::UP024,
            CheckKind::RewriteUnicodeLiteral => &CheckCode::UP025,
            // pydocstyle
            CheckKind::BlankLineAfterLastSection(..) => &CheckCode::D413,
@@ -2715,6 +2720,7 @@ impl CheckKind {
            CheckKind::RewriteCElementTree => {
                "`cElementTree` is deprecated, use `ElementTree`".to_string()
            }
+           CheckKind::OSErrorAlias(..) => "Replace aliased errors with `OSError`".to_string(),
            CheckKind::RewriteUnicodeLiteral => "Remove unicode literals from strings".to_string(),
            // pydocstyle
            CheckKind::FitsOnOneLine => "One-line docstring should fit on one line".to_string(),
@@ -3129,8 +3135,8 @@ impl CheckKind {
        matches!(
            self,
            CheckKind::AmbiguousUnicodeCharacterString(..)
-               | CheckKind::AmbiguousUnicodeCharacterDocstring(..)
                | CheckKind::AmbiguousUnicodeCharacterComment(..)
+               | CheckKind::AmbiguousUnicodeCharacterDocstring(..)
                | CheckKind::BlankLineAfterLastSection(..)
                | CheckKind::BlankLineAfterSection(..)
                | CheckKind::BlankLineAfterSummary
@@ -3156,12 +3162,7 @@ impl CheckKind {
                | CheckKind::MisplacedComparisonConstant(..)
                | CheckKind::MissingReturnTypeSpecialMethod(..)
                | CheckKind::NativeLiterals(..)
-               | CheckKind::OpenAlias
                | CheckKind::NewLineAfterLastParagraph
-               | CheckKind::ReplaceUniversalNewlines
-               | CheckKind::ReplaceStdoutStderr
-               | CheckKind::RewriteCElementTree
-               | CheckKind::RewriteUnicodeLiteral
                | CheckKind::NewLineAfterSectionName(..)
                | CheckKind::NoBlankLineAfterFunction(..)
                | CheckKind::NoBlankLineBeforeClass(..)
@@ -3174,16 +3175,22 @@ impl CheckKind {
                | CheckKind::NoneComparison(..)
                | CheckKind::NotInTest
                | CheckKind::NotIsTest
+               | CheckKind::OSErrorAlias(..)
                | CheckKind::OneBlankLineAfterClass(..)
                | CheckKind::OneBlankLineBeforeClass(..)
+               | CheckKind::OpenAlias
                | CheckKind::PEP3120UnnecessaryCodingComment
                | CheckKind::PPrintFound
-               | CheckKind::PrintFound
                | CheckKind::PercentFormatExtraNamedArguments(..)
+               | CheckKind::PrintFound
                | CheckKind::RaiseNotImplemented
                | CheckKind::RedundantOpenModes(..)
                | CheckKind::RedundantTupleInExceptionHandler(..)
                | CheckKind::RemoveSixCompat
+               | CheckKind::ReplaceStdoutStderr
+               | CheckKind::ReplaceUniversalNewlines
+               | CheckKind::RewriteCElementTree
+               | CheckKind::RewriteUnicodeLiteral
                | CheckKind::SectionNameEndsInColon(..)
                | CheckKind::SectionNotOverIndented(..)
                | CheckKind::SectionUnderlineAfterName(..)
@@ -3313,6 +3320,10 @@ impl CheckKind {
            CheckKind::OneBlankLineAfterClass(..) => {
                Some("Insert 1 blank line after class docstring".to_string())
            }
+           CheckKind::OSErrorAlias(name) => Some(match name {
+               None => "Replace with builtin `OSError`".to_string(),
+               Some(name) => format!("Replace `{name}` with builtin `OSError`"),
+           }),
            CheckKind::NoBlankLinesBetweenHeaderAndContent(..) => {
                Some("Remove blank line(s)".to_string())
            }

View file

@@ -540,6 +540,7 @@ pub enum CheckCodePrefix {
    UP021,
    UP022,
    UP023,
+   UP024,
    UP025,
    W,
    W2,
@@ -773,6 +774,7 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
            CheckCode::D100,
            CheckCode::D101,
@@ -2452,6 +2454,7 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
        ]
    }
@@ -2485,6 +2488,7 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
        ]
    }
@@ -2702,6 +2706,7 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
        ],
        CheckCodePrefix::UP0 => vec![
@@ -2727,6 +2732,7 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
        ],
        CheckCodePrefix::UP00 => vec![
@@ -2774,12 +2780,14 @@ impl CheckCodePrefix {
            CheckCode::UP021,
            CheckCode::UP022,
            CheckCode::UP023,
+           CheckCode::UP024,
            CheckCode::UP025,
        ],
        CheckCodePrefix::UP020 => vec![CheckCode::UP020],
        CheckCodePrefix::UP021 => vec![CheckCode::UP021],
        CheckCodePrefix::UP022 => vec![CheckCode::UP022],
        CheckCodePrefix::UP023 => vec![CheckCode::UP023],
+       CheckCodePrefix::UP024 => vec![CheckCode::UP024],
        CheckCodePrefix::UP025 => vec![CheckCode::UP025],
        CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
        CheckCodePrefix::W2 => vec![CheckCode::W292],
@@ -3352,6 +3360,7 @@ impl CheckCodePrefix {
        CheckCodePrefix::UP021 => SuffixLength::Three,
        CheckCodePrefix::UP022 => SuffixLength::Three,
        CheckCodePrefix::UP023 => SuffixLength::Three,
+       CheckCodePrefix::UP024 => SuffixLength::Three,
        CheckCodePrefix::UP025 => SuffixLength::Three,
        CheckCodePrefix::W => SuffixLength::Zero,
        CheckCodePrefix::W2 => SuffixLength::One,

View file

@@ -110,10 +110,7 @@ pub fn extract_isort_directives(lxr: &[LexResult], locator: &SourceCodeLocator)
        }
        // TODO(charlie): Modify RustPython to include the comment text in the token.
-       let comment_text = locator.slice_source_code_range(&Range {
-           location: start,
-           end_location: end,
-       });
+       let comment_text = locator.slice_source_code_range(&Range::new(start, end));
        if comment_text == "# isort: split" {
            splits.push(start.row());

View file

@@ -1,4 +1,4 @@
-// See: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals.
+/// See: <https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals>
 pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
     "u\"\"\"", "u'''", "r\"\"\"", "r'''", "U\"\"\"", "U'''", "R\"\"\"", "R'''", "\"\"\"", "'''",

View file

@@ -28,20 +28,11 @@ pub fn commented_out_code(
 ) -> Option<Check> {
     let location = Location::new(start.row(), 0);
     let end_location = Location::new(end.row() + 1, 0);
-    let line = locator.slice_source_code_range(&Range {
-        location,
-        end_location,
-    });
+    let line = locator.slice_source_code_range(&Range::new(location, end_location));
     // Verify that the comment is on its own line, and that it contains code.
     if is_standalone_comment(&line) && comment_contains_code(&line) {
-        let mut check = Check::new(
-            CheckKind::CommentedOutCode,
-            Range {
-                location: start,
-                end_location: end,
-            },
-        );
+        let mut check = Check::new(CheckKind::CommentedOutCode, Range::new(start, end));
         if matches!(autofix, flags::Autofix::Enabled)
             && settings.fixable.contains(&CheckCode::ERA001)
         {

View file

@@ -47,10 +47,7 @@ pub fn quotes(
     is_docstring: bool,
     settings: &Settings,
 ) -> Option<Check> {
-    let text = locator.slice_source_code_range(&Range {
-        location: start,
-        end_location: end,
-    });
+    let text = locator.slice_source_code_range(&Range::new(start, end));
     // Remove any prefixes (e.g., remove `u` from `u"foo"`).
     let last_quote_char = text.chars().last().unwrap();
@@ -76,10 +73,7 @@ pub fn quotes(
         Some(Check::new(
             CheckKind::BadQuotesDocstring(settings.docstring_quotes.clone()),
-            Range {
-                location: start,
-                end_location: end,
-            },
+            Range::new(start, end),
         ))
     } else if is_multiline {
         // If our string is or contains a known good string, ignore it.
@@ -94,10 +88,7 @@ pub fn quotes(
         Some(Check::new(
             CheckKind::BadQuotesMultilineString(settings.multiline_quotes.clone()),
-            Range {
-                location: start,
-                end_location: end,
-            },
+            Range::new(start, end),
         ))
     } else {
         let string_contents = &raw_text[1..raw_text.len() - 1];
@@ -112,10 +103,7 @@ pub fn quotes(
         {
             return Some(Check::new(
                 CheckKind::AvoidQuoteEscape,
-                Range {
-                    location: start,
-                    end_location: end,
-                },
+                Range::new(start, end),
             ));
         }
         return None;
@@ -125,10 +113,7 @@ pub fn quotes(
         if !string_contents.contains(good_single(&settings.inline_quotes)) {
             return Some(Check::new(
                 CheckKind::BadQuotesInlineString(settings.inline_quotes.clone()),
-                Range {
-                    location: start,
-                    end_location: end,
-                },
+                Range::new(start, end),
             ));
         }

View file

@@ -46,10 +46,7 @@ pub fn key_in_dict_for(checker: &mut Checker, target: &Expr, iter: &Expr) {
         checker,
         target,
         iter,
-        Range {
-            location: target.location,
-            end_location: iter.end_location.unwrap(),
-        },
+        Range::new(target.location, iter.end_location.unwrap()),
     );
 }

View file

@@ -22,10 +22,7 @@ pub fn collect_comments<'a>(range: &Range, locator: &'a SourceCodeLocator) -> Ve
         .filter_map(|(start, tok, end)| {
             if matches!(tok, Tok::Comment) {
                 Some(Comment {
-                    value: locator.slice_source_code_range(&Range {
-                        location: start,
-                        end_location: end,
-                    }),
+                    value: locator.slice_source_code_range(&Range::new(start, end)),
                     location: start,
                     end_location: end,
                 })

View file

@@ -19,18 +19,12 @@ use crate::{Check, Settings, SourceCodeLocator};
 fn extract_range(body: &[&Stmt]) -> Range {
     let location = body.first().unwrap().location;
     let end_location = body.last().unwrap().end_location.unwrap();
-    Range {
-        location,
-        end_location,
-    }
+    Range::new(location, end_location)
 }
 fn extract_indentation_range(body: &[&Stmt]) -> Range {
     let location = body.first().unwrap().location;
-    Range {
-        location: Location::new(location.row(), 0),
-        end_location: location,
-    }
+    Range::new(Location::new(location.row(), 0), location)
 }
 /// I001
@@ -57,10 +51,10 @@ pub fn check_imports(
     // Extract comments. Take care to grab any inline comments from the last line.
     let comments = comments::collect_comments(
-        &Range {
-            location: range.location,
-            end_location: Location::new(range.end_location.row() + 1, 0),
-        },
+        &Range::new(
+            range.location,
+            Location::new(range.end_location.row() + 1, 0),
+        ),
         locator,
     );
@@ -90,10 +84,10 @@
     );
     // Expand the span the entire range, including leading and trailing space.
-    let range = Range {
-        location: Location::new(range.location.row(), 0),
-        end_location: Location::new(range.end_location.row() + 1 + num_trailing_lines, 0),
-    };
+    let range = Range::new(
+        Location::new(range.location.row(), 0),
+        Location::new(range.end_location.row() + 1 + num_trailing_lines, 0),
+    );
     let actual = dedent(&locator.slice_source_code_range(&range));
     if actual == dedent(&expected) {
         None

View file

@@ -120,10 +120,7 @@ pub(crate) fn check_path(
         if settings.enabled.contains(&CheckCode::E999) {
             checks.push(Check::new(
                 CheckKind::SyntaxError(parse_error.error.to_string()),
-                Range {
-                    location: parse_error.location,
-                    end_location: parse_error.location,
-                },
+                Range::new(parse_error.location, parse_error.location),
             ));
         }
     }

View file

@@ -55,22 +55,18 @@ pub struct Source {
 impl Source {
     pub fn from_check(check: &Check, locator: &SourceCodeLocator) -> Self {
-        let source = locator.slice_source_code_range(&Range {
-            location: Location::new(check.location.row(), 0),
-            // Checks can already extend one-past-the-end per Ropey's semantics. If they do, though,
-            // then they'll end at the start of a line. We need to avoid extending by yet another
-            // line past-the-end.
-            end_location: if check.end_location.column() == 0 {
+        let location = Location::new(check.location.row(), 0);
+        // Checks can already extend one-past-the-end per Ropey's semantics. If they do,
+        // though, then they'll end at the start of a line. We need to avoid
+        // extending by yet another line past-the-end.
+        let end_location = if check.end_location.column() == 0 {
             check.end_location
         } else {
             Location::new(check.end_location.row() + 1, 0)
-            },
-        });
+        };
+        let source = locator.slice_source_code_range(&Range::new(location, end_location));
         let num_chars_in_range = locator
-            .slice_source_code_range(&Range {
-                location: check.location,
-                end_location: check.end_location,
-            })
+            .slice_source_code_range(&Range::new(check.location, check.end_location))
             .chars()
             .count();
         Source {

View file

@@ -248,10 +248,7 @@ mod tests {
        let checks = vec![Check::new(
            CheckKind::UnusedVariable("x".to_string()),
-           Range {
-               location: Location::new(1, 0),
-               end_location: Location::new(1, 0),
-           },
+           Range::new(Location::new(1, 0), Location::new(1, 0)),
        )];
        let contents = "x = 1";
        let noqa_line_for = IntMap::default();
@@ -269,17 +266,11 @@ mod tests {
        let checks = vec![
            Check::new(
                CheckKind::AmbiguousVariableName("x".to_string()),
-               Range {
-                   location: Location::new(1, 0),
-                   end_location: Location::new(1, 0),
-               },
+               Range::new(Location::new(1, 0), Location::new(1, 0)),
            ),
            Check::new(
                CheckKind::UnusedVariable("x".to_string()),
-               Range {
-                   location: Location::new(1, 0),
-                   end_location: Location::new(1, 0),
-               },
+               Range::new(Location::new(1, 0), Location::new(1, 0)),
            ),
        ];
        let contents = "x = 1 # noqa: E741\n";
@@ -298,17 +289,11 @@ mod tests {
        let checks = vec![
            Check::new(
                CheckKind::AmbiguousVariableName("x".to_string()),
-               Range {
-                   location: Location::new(1, 0),
-                   end_location: Location::new(1, 0),
-               },
+               Range::new(Location::new(1, 0), Location::new(1, 0)),
            ),
            Check::new(
                CheckKind::UnusedVariable("x".to_string()),
-               Range {
-                   location: Location::new(1, 0),
-                   end_location: Location::new(1, 0),
-               },
+               Range::new(Location::new(1, 0), Location::new(1, 0)),
            ),
        ];
        let contents = "x = 1 # noqa";

View file

@@ -34,10 +34,10 @@ pub fn line_too_long(lineno: usize, line: &str, max_line_length: usize) -> Optio
     Some(Check::new(
         CheckKind::LineTooLong(line_length, max_line_length),
-        Range {
-            location: Location::new(lineno + 1, max_line_length),
-            end_location: Location::new(lineno + 1, line_length),
-        },
+        Range::new(
+            Location::new(lineno + 1, max_line_length),
+            Location::new(lineno + 1, line_length),
+        ),
     ))
 }
@@ -162,10 +162,7 @@ pub fn no_newline_at_end_of_file(contents: &str, autofix: bool) -> Option<Check>
         let location = Location::new(contents.lines().count(), line.len());
         let mut check = Check::new(
             CheckKind::NoNewLineAtEndOfFile,
-            Range {
-                location,
-                end_location: location,
-            },
+            Range::new(location, location),
         );
         if autofix {
             check.amend(Fix::insertion("\n".to_string(), location));
@@ -203,10 +200,7 @@ pub fn invalid_escape_sequence(
 ) -> Vec<Check> {
     let mut checks = vec![];
-    let text = locator.slice_source_code_range(&Range {
-        location: start,
-        end_location: end,
-    });
+    let text = locator.slice_source_code_range(&Range::new(start, end));
     // Determine whether the string is single- or triple-quoted.
     let quote = extract_quote(&text);
@@ -249,10 +243,7 @@ pub fn invalid_escape_sequence(
             let end_location = Location::new(location.row(), location.column() + 2);
             let mut check = Check::new(
                 CheckKind::InvalidEscapeSequence(next_char),
-                Range {
-                    location,
-                    end_location,
-                },
+                Range::new(location, end_location),
             );
             if autofix {
                 check.amend(Fix::insertion(r"\".to_string(), location));

View file

@@ -329,10 +329,10 @@ pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr,
     {
         match function(id, args, body, checker.style) {
             Ok(content) => {
-                let first_line = checker.locator.slice_source_code_range(&Range {
-                    location: Location::new(stmt.location.row(), 0),
-                    end_location: Location::new(stmt.location.row() + 1, 0),
-                });
+                let first_line = checker.locator.slice_source_code_range(&Range::new(
+                    Location::new(stmt.location.row(), 0),
+                    Location::new(stmt.location.row() + 1, 0),
+                ));
                 let indentation = &leading_space(&first_line);
                 let mut indented = String::new();
                 for (idx, line) in content.lines().enumerate() {

View file

@@ -34,10 +34,7 @@ pub fn not_missing(
            if checker.settings.enabled.contains(&CheckCode::D100) {
                checker.add_check(Check::new(
                    CheckKind::PublicModule,
-                   Range {
-                       location: Location::new(1, 0),
-                       end_location: Location::new(1, 0),
-                   },
+                   Range::new(Location::new(1, 0), Location::new(1, 0)),
                ));
            }
            false
@@ -46,10 +43,7 @@ pub fn not_missing(
            if checker.settings.enabled.contains(&CheckCode::D104) {
                checker.add_check(Check::new(
                    CheckKind::PublicPackage,
-                   Range {
-                       location: Location::new(1, 0),
-                       end_location: Location::new(1, 0),
-                   },
+                   Range::new(Location::new(1, 0), Location::new(1, 0)),
                ));
            }
            false
@@ -412,10 +406,10 @@ pub fn indent(checker: &mut Checker, docstring: &Docstring) {
        {
            let mut check = Check::new(
                CheckKind::NoUnderIndentation,
-               Range {
-                   location: Location::new(docstring.expr.location.row() + i, 0),
-                   end_location: Location::new(docstring.expr.location.row() + i, 0),
-               },
+               Range::new(
+                   Location::new(docstring.expr.location.row() + i, 0),
+                   Location::new(docstring.expr.location.row() + i, 0),
+               ),
            );
            if checker.patch(check.kind.code()) {
                check.amend(Fix::replacement(
@@ -462,10 +456,10 @@ pub fn indent(checker: &mut Checker, docstring: &Docstring) {
            // enables autofix.
            let mut check = Check::new(
                CheckKind::NoOverIndentation,
-               Range {
-                   location: Location::new(docstring.expr.location.row() + i, 0),
-                   end_location: Location::new(docstring.expr.location.row() + i, 0),
-               },
+               Range::new(
+                   Location::new(docstring.expr.location.row() + i, 0),
+                   Location::new(docstring.expr.location.row() + i, 0),
+               ),
            );
            if checker.patch(check.kind.code()) {
                check.amend(Fix::replacement(
@@ -486,10 +480,10 @@ pub fn indent(checker: &mut Checker, docstring: &Docstring) {
        if line_indent.len() > docstring.indentation.len() {
            let mut check = Check::new(
                CheckKind::NoOverIndentation,
-               Range {
-                   location: Location::new(docstring.expr.location.row() + i, 0),
-                   end_location: Location::new(docstring.expr.location.row() + i, 0),
-               },
+               Range::new(
+                   Location::new(docstring.expr.location.row() + i, 0),
+                   Location::new(docstring.expr.location.row() + i, 0),
+               ),
            );
            if checker.patch(check.kind.code()) {
                check.amend(Fix::replacement(

View file

@@ -13,10 +13,10 @@ pub fn blanket_noqa(lineno: usize, line: &str) -> Option<Check> {
     BLANKET_NOQA_REGEX.find(line).map(|m| {
         Check::new(
             CheckKind::BlanketNOQA,
-            Range {
-                location: Location::new(lineno + 1, m.start()),
-                end_location: Location::new(lineno + 1, m.end()),
-            },
+            Range::new(
+                Location::new(lineno + 1, m.start()),
+                Location::new(lineno + 1, m.end()),
+            ),
         )
     })
 }

View file

@@ -13,10 +13,10 @@ pub fn blanket_type_ignore(lineno: usize, line: &str) -> Option<Check> {
     BLANKET_TYPE_IGNORE_REGEX.find(line).map(|m| {
         Check::new(
             CheckKind::BlanketTypeIgnore,
-            Range {
-                location: Location::new(lineno + 1, m.start()),
-                end_location: Location::new(lineno + 1, m.end()),
-            },
+            Range::new(
+                Location::new(lineno + 1, m.start()),
+                Location::new(lineno + 1, m.end()),
+            ),
         )
     })
 }

View file

@@ -170,10 +170,7 @@ pub fn unnecessary_coding_comment(lineno: usize, line: &str, autofix: bool) -> O
     if CODING_COMMENT_REGEX.is_match(line) {
         let mut check = Check::new(
             CheckKind::PEP3120UnnecessaryCodingComment,
-            Range {
-                location: Location::new(lineno + 1, 0),
-                end_location: Location::new(lineno + 2, 0),
-            },
+            Range::new(Location::new(lineno + 1, 0), Location::new(lineno + 2, 0)),
         );
         if autofix {
             check.amend(Fix::deletion(
@@ -216,10 +213,7 @@ pub fn unnecessary_lru_cache_params(
             continue;
         }
-        let range = Range {
-            location: func.end_location.unwrap(),
-            end_location: expr.end_location.unwrap(),
-        };
+        let range = Range::new(func.end_location.unwrap(), expr.end_location.unwrap());
         // Ex) `functools.lru_cache()`
         if keywords.is_empty() {
             return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));

View file

@@ -42,6 +42,9 @@ mod tests {
     #[test_case(CheckCode::UP021, Path::new("UP021.py"); "UP021")]
     #[test_case(CheckCode::UP022, Path::new("UP022.py"); "UP022")]
     #[test_case(CheckCode::UP023, Path::new("UP023.py"); "UP023")]
+    #[test_case(CheckCode::UP024, Path::new("UP024_0.py"); "UP024_0")]
+    #[test_case(CheckCode::UP024, Path::new("UP024_1.py"); "UP024_1")]
+    #[test_case(CheckCode::UP024, Path::new("UP024_2.py"); "UP024_2")]
     #[test_case(CheckCode::UP025, Path::new("UP025.py"); "UP025")]
     fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
         let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());

View file

@@ -4,6 +4,7 @@ pub use datetime_utc_alias::datetime_utc_alias;
 pub use deprecated_unittest_alias::deprecated_unittest_alias;
 pub use native_literals::native_literals;
 pub use open_alias::open_alias;
+pub use os_error_alias::os_error_alias;
 pub use redundant_open_modes::redundant_open_modes;
 pub use remove_six_compat::remove_six_compat;
 pub use replace_stdout_stderr::replace_stdout_stderr;
@@ -27,6 +28,7 @@ mod datetime_utc_alias;
 mod deprecated_unittest_alias;
 mod native_literals;
 mod open_alias;
+mod os_error_alias;
 mod redundant_open_modes;
 mod remove_six_compat;
 mod replace_stdout_stderr;

View file

@@ -0,0 +1,236 @@
#![allow(clippy::len_zero, clippy::needless_pass_by_value)]
use itertools::Itertools;
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Located};
use crate::ast::helpers::{compose_call_path, match_module_member};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::checks::{Check, CheckKind};
const ERROR_NAMES: &[&str] = &["EnvironmentError", "IOError", "WindowsError"];
const ERROR_MODULES: &[&str] = &["mmap", "select", "socket"];
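/// Return the builtin replacement (`OSError`) for a deprecated alias; any other name is returned unchanged.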
fn get_correct_name(original: &str) -> String {
if ERROR_NAMES.contains(&original) {
"OSError".to_string()
} else {
original.to_string()
}
}
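/// Collect the original names of the elements in an `except` tuple; non-`Name` elements yield empty strings, which are filtered out later.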
fn get_before_replace(elts: &[Expr]) -> Vec<String> {
elts.iter()
.map(|elt| {
if let ExprKind::Name { id, .. } = &elt.node {
id.to_string()
} else {
String::new()
}
})
.collect()
}
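/// If the expression refers to the `error` member of one of the aliased modules (`mmap`, `select`, `socket`), record `OSError` as the replacement along with the original dotted name.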
fn check_module(checker: &Checker, expr: &Expr) -> (Vec<String>, Vec<String>) {
let mut replacements: Vec<String> = vec![];
let mut before_replace: Vec<String> = vec![];
for module in ERROR_MODULES.iter() {
if match_module_member(
expr,
module,
"error",
&checker.from_imports,
&checker.import_aliases,
) {
replacements.push("OSError".to_string());
before_replace.push(format!("{module}.error"));
break;
}
}
(replacements, before_replace)
}
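/// Record the replacement for a bare name or a module attribute used as an exception.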
fn handle_name_or_attribute(
checker: &Checker,
item: &Expr,
replacements: &mut Vec<String>,
before_replace: &mut Vec<String>,
) {
match &item.node {
ExprKind::Name { id, .. } => {
let (temp_replacements, temp_before_replace) = check_module(checker, item);
replacements.extend(temp_replacements);
before_replace.extend(temp_before_replace);
if replacements.is_empty() {
let new_name = get_correct_name(id);
replacements.push(new_name);
before_replace.push(id.to_string());
}
}
ExprKind::Attribute { .. } => {
let (temp_replacements, temp_before_replace) = check_module(checker, item);
replacements.extend(temp_replacements);
before_replace.extend(temp_before_replace);
}
_ => (),
}
}
/// Handles one block of an except (use a loop if there are multiple blocks)
fn handle_except_block(checker: &mut Checker, handler: &Located<ExcepthandlerKind>) {
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
let Some(error_handlers) = type_.as_ref() else {
return;
};
// The first part creates a list of all the exceptions being caught, and
// what they should be changed to.
let mut replacements: Vec<String> = vec![];
let mut before_replace: Vec<String> = vec![];
match &error_handlers.node {
ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
handle_name_or_attribute(
checker,
error_handlers,
&mut replacements,
&mut before_replace,
);
}
ExprKind::Tuple { elts, .. } => {
before_replace = get_before_replace(elts);
for elt in elts {
match &elt.node {
ExprKind::Name { id, .. } => {
let new_name = get_correct_name(id);
replacements.push(new_name);
}
ExprKind::Attribute { .. } => {
let (new_replacements, new_before_replace) = check_module(checker, elt);
replacements.extend(new_replacements);
before_replace.extend(new_before_replace);
}
_ => (),
}
}
}
_ => return,
}
replacements = replacements
.iter()
.unique()
.map(std::string::ToString::to_string)
.collect();
before_replace = before_replace
.iter()
.filter(|x| !x.is_empty())
.map(std::string::ToString::to_string)
.collect();
// This part checks whether the names being caught differ from what they
// should be. Where they differ, the changes are applied.
handle_making_changes(checker, error_handlers, &before_replace, &replacements);
}
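/// Emit the UP024 check (and the autofix, when patching is enabled) if the replacements differ from the original names.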
fn handle_making_changes(
checker: &mut Checker,
target: &Expr,
before_replace: &[String],
replacements: &[String],
) {
if before_replace != replacements && replacements.len() > 0 {
let range = Range::new(target.location, target.end_location.unwrap());
let contents = checker.locator.slice_source_code_range(&range);
        // Pyupgrade does not want the exception changed if the module reference
        // is wrapped in parentheses. For example: `except mmap.error:` would be
        // changed, but `except (mmap).error:` would not. One issue with this
        // implementation is that some valid changes will also be ignored. Let me
        // know if you want me to go with a more complicated solution that avoids
        // this.
if contents.contains(").") {
return;
}
let mut final_str: String;
if replacements.len() == 1 {
final_str = replacements.get(0).unwrap().to_string();
} else {
final_str = replacements.join(", ");
final_str.insert(0, '(');
final_str.push(')');
}
let mut check = Check::new(CheckKind::OSErrorAlias(compose_call_path(target)), range);
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
final_str,
range.location,
range.end_location,
));
}
checker.add_check(check);
}
}
// This is a hacky way to handle the different node types we get, since
// `raise` and `try` are very different. Would love input on a cleaner way.
pub trait OSErrorAliasChecker {
fn check_error(&self, checker: &mut Checker)
where
Self: Sized;
}
impl OSErrorAliasChecker for &Vec<Excepthandler> {
fn check_error(&self, checker: &mut Checker) {
// Each separate except block is a separate error and fix
for handler in self.iter() {
handle_except_block(checker, handler);
}
}
}
impl OSErrorAliasChecker for &Box<Expr> {
fn check_error(&self, checker: &mut Checker) {
let mut replacements: Vec<String> = vec![];
let mut before_replace: Vec<String> = vec![];
match &self.node {
ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
handle_name_or_attribute(checker, self, &mut replacements, &mut before_replace);
}
_ => return,
}
handle_making_changes(checker, self, &before_replace, &replacements);
}
}
impl OSErrorAliasChecker for &Expr {
fn check_error(&self, checker: &mut Checker) {
let mut replacements: Vec<String> = vec![];
let mut before_replace: Vec<String> = vec![];
let change_target: &Expr;
match &self.node {
ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
change_target = self;
handle_name_or_attribute(checker, self, &mut replacements, &mut before_replace);
}
ExprKind::Call { func, .. } => {
change_target = func;
match &func.node {
ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
handle_name_or_attribute(
checker,
func,
&mut replacements,
&mut before_replace,
);
}
_ => return,
}
}
_ => return,
}
handle_making_changes(checker, change_target, &before_replace, &replacements);
}
}
/// UP024
pub fn os_error_alias<U: OSErrorAliasChecker>(checker: &mut Checker, handlers: U) {
handlers.check_error(checker);
}
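
As a sketch of the tuple handling above (grounded in the UP024_0.py fixture and its snapshot, not an exhaustive specification): when every member of the tuple maps to `OSError`, the parentheses are dropped; when other exceptions remain, the tuple is kept and the aliases are deduplicated.

```python
import select

# Every element maps to OSError, so the tuple collapses to a bare name.
try:
    pass
except (EnvironmentError, IOError, OSError, select.error):  # -> except OSError:
    pass

# KeyError survives, so the tuple is kept and the aliases are deduplicated.
try:
    pass
except (IOError, KeyError, OSError):  # -> except (OSError, KeyError):
    pass
```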

View file

@@ -107,10 +107,8 @@ fn create_remove_param_fix(
     expr: &Expr,
     mode_param: &Expr,
 ) -> Result<Fix> {
-    let content = locator.slice_source_code_range(&Range {
-        location: expr.location,
-        end_location: expr.end_location.unwrap(),
-    });
+    let content =
+        locator.slice_source_code_range(&Range::new(expr.location, expr.end_location.unwrap()));
     // Find the last comma before mode_param and create a deletion fix
     // starting from the comma and ending after mode_param.
     let mut fix_start: Option<Location> = None;

View file

@@ -65,10 +65,10 @@ fn replace_by_str_literal(
         let content = format!(
             "{}{}",
             if binary { "b" } else { "" },
-            locator.slice_source_code_range(&Range {
-                location: arg.location,
-                end_location: arg.end_location.unwrap(),
-            })
+            locator.slice_source_code_range(&Range::new(
+                arg.location,
+                arg.end_location.unwrap(),
+            ))
         );
         check.amend(Fix::replacement(
             content,

View file

@@ -87,10 +87,12 @@ pub fn replace_stdout_stderr(checker: &mut Checker, expr: &Expr, kwargs: &[Keywo
         stderr
     };
     let mut contents = String::from("capture_output=True");
-    if let Some(middle) = extract_middle(&checker.locator.slice_source_code_range(&Range {
-        location: first.end_location.unwrap(),
-        end_location: last.location,
-    })) {
+    if let Some(middle) =
+        extract_middle(&checker.locator.slice_source_code_range(&Range::new(
+            first.end_location.unwrap(),
+            last.location,
+        )))
+    {
         if middle.multi_line {
             contents.push(',');
             contents.push('\n');

View file

@@ -16,13 +16,13 @@ pub fn replace_universal_newlines(checker: &mut Checker, expr: &Expr, kwargs: &[
         &checker.import_aliases,
     ) {
         let Some(kwarg) = find_keyword(kwargs, "universal_newlines") else { return; };
-        let range = Range {
-            location: kwarg.location,
-            end_location: Location::new(
+        let range = Range::new(
+            kwarg.location,
+            Location::new(
                 kwarg.location.row(),
                 kwarg.location.column() + "universal_newlines".len(),
             ),
-        };
+        );
         let mut check = Check::new(CheckKind::ReplaceUniversalNewlines, range);
         if checker.patch(check.kind.code()) {
             check.amend(Fix::replacement(

View file

@@ -84,10 +84,10 @@ fn replace_with_bytes_literal(
 ) -> Check {
     let mut check = Check::new(CheckKind::UnnecessaryEncodeUTF8, Range::from_located(expr));
     if patch {
-        let content = locator.slice_source_code_range(&Range {
-            location: constant.location,
-            end_location: constant.end_location.unwrap(),
-        });
+        let content = locator.slice_source_code_range(&Range::new(
+            constant.location,
+            constant.end_location.unwrap(),
+        ));
         let content = format!(
             "b{}",
             content.trim_start_matches('u').trim_start_matches('U')

View file

@@ -0,0 +1,209 @@
---
source: src/pyupgrade/mod.rs
expression: checks
---
- kind:
OSErrorAlias: EnvironmentError
location:
row: 6
column: 7
end_location:
row: 6
column: 23
fix:
content: OSError
location:
row: 6
column: 7
end_location:
row: 6
column: 23
parent: ~
- kind:
OSErrorAlias: IOError
location:
row: 11
column: 7
end_location:
row: 11
column: 14
fix:
content: OSError
location:
row: 11
column: 7
end_location:
row: 11
column: 14
parent: ~
- kind:
OSErrorAlias: WindowsError
location:
row: 16
column: 7
end_location:
row: 16
column: 19
fix:
content: OSError
location:
row: 16
column: 7
end_location:
row: 16
column: 19
parent: ~
- kind:
OSErrorAlias: mmap.error
location:
row: 21
column: 7
end_location:
row: 21
column: 17
fix:
content: OSError
location:
row: 21
column: 7
end_location:
row: 21
column: 17
parent: ~
- kind:
OSErrorAlias: select.error
location:
row: 26
column: 7
end_location:
row: 26
column: 19
fix:
content: OSError
location:
row: 26
column: 7
end_location:
row: 26
column: 19
parent: ~
- kind:
OSErrorAlias: socket.error
location:
row: 31
column: 7
end_location:
row: 31
column: 19
fix:
content: OSError
location:
row: 31
column: 7
end_location:
row: 31
column: 19
parent: ~
- kind:
OSErrorAlias: error
location:
row: 36
column: 7
end_location:
row: 36
column: 12
fix:
content: OSError
location:
row: 36
column: 7
end_location:
row: 36
column: 12
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 43
column: 7
end_location:
row: 43
column: 17
fix:
content: OSError
location:
row: 43
column: 7
end_location:
row: 43
column: 17
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 47
column: 7
end_location:
row: 47
column: 20
fix:
content: OSError
location:
row: 47
column: 7
end_location:
row: 47
column: 20
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 51
column: 7
end_location:
row: 51
column: 57
fix:
content: OSError
location:
row: 51
column: 7
end_location:
row: 51
column: 57
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 58
column: 7
end_location:
row: 58
column: 35
fix:
content: "(OSError, KeyError)"
location:
row: 58
column: 7
end_location:
row: 58
column: 35
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 65
column: 7
end_location:
row: 65
column: 23
fix:
content: "(OSError, error)"
location:
row: 65
column: 7
end_location:
row: 65
column: 23
parent: ~
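
Each entry in these snapshot files records a check kind, the source range it covers, and the autofix that will replace it. A hypothetical, simplified set of Rust types mirroring the YAML keys (kind, location, end_location, fix, parent); the crate's actual definitions may differ:

// Hypothetical, simplified mirror of the YAML keys above; not the crate's real types.
#[derive(Debug)]
struct Location {
    row: usize,
    column: usize,
}

#[derive(Debug)]
struct Fix {
    content: String,
    location: Location,
    end_location: Location,
}

#[derive(Debug)]
enum CheckKind {
    // `OSErrorAlias: socket.error` corresponds to Some("socket.error"),
    // `OSErrorAlias: ~` to None.
    OSErrorAlias(Option<String>),
}

#[derive(Debug)]
struct Check {
    kind: CheckKind,
    location: Location,
    end_location: Location,
    fix: Option<Fix>,
    // `parent: ~` in every entry above; its concrete type is not shown here.
    parent: Option<Location>,
}

fn main() {
    // The first entry of the snapshot above, rebuilt by hand.
    let check = Check {
        kind: CheckKind::OSErrorAlias(Some("EnvironmentError".to_string())),
        location: Location { row: 6, column: 7 },
        end_location: Location { row: 6, column: 23 },
        fix: Some(Fix {
            content: "OSError".to_string(),
            location: Location { row: 6, column: 7 },
            end_location: Location { row: 6, column: 23 },
        }),
        parent: None,
    };
    println!("{check:?}");
}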


@@ -0,0 +1,56 @@
---
source: src/pyupgrade/mod.rs
expression: checks
---
- kind:
OSErrorAlias: ~
location:
row: 5
column: 7
end_location:
row: 5
column: 37
fix:
content: OSError
location:
row: 5
column: 7
end_location:
row: 5
column: 37
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 7
column: 7
end_location:
row: 7
column: 40
fix:
content: "(OSError, KeyError)"
location:
row: 7
column: 7
end_location:
row: 7
column: 40
parent: ~
- kind:
OSErrorAlias: ~
location:
row: 12
column: 7
end_location:
row: 16
column: 1
fix:
content: OSError
location:
row: 12
column: 7
end_location:
row: 16
column: 1
parent: ~


@@ -0,0 +1,345 @@
---
source: src/pyupgrade/mod.rs
expression: checks
---
- kind:
OSErrorAlias: socket.error
location:
row: 10
column: 6
end_location:
row: 10
column: 18
fix:
content: OSError
location:
row: 10
column: 6
end_location:
row: 10
column: 18
parent: ~
- kind:
OSErrorAlias: mmap.error
location:
row: 11
column: 6
end_location:
row: 11
column: 16
fix:
content: OSError
location:
row: 11
column: 6
end_location:
row: 11
column: 16
parent: ~
- kind:
OSErrorAlias: select.error
location:
row: 12
column: 6
end_location:
row: 12
column: 18
fix:
content: OSError
location:
row: 12
column: 6
end_location:
row: 12
column: 18
parent: ~
- kind:
OSErrorAlias: socket.error
location:
row: 14
column: 6
end_location:
row: 14
column: 18
fix:
content: OSError
location:
row: 14
column: 6
end_location:
row: 14
column: 18
parent: ~
- kind:
OSErrorAlias: mmap.error
location:
row: 15
column: 6
end_location:
row: 15
column: 16
fix:
content: OSError
location:
row: 15
column: 6
end_location:
row: 15
column: 16
parent: ~
- kind:
OSErrorAlias: select.error
location:
row: 16
column: 6
end_location:
row: 16
column: 18
fix:
content: OSError
location:
row: 16
column: 6
end_location:
row: 16
column: 18
parent: ~
- kind:
OSErrorAlias: socket.error
location:
row: 18
column: 6
end_location:
row: 18
column: 18
fix:
content: OSError
location:
row: 18
column: 6
end_location:
row: 18
column: 18
parent: ~
- kind:
OSErrorAlias: error
location:
row: 25
column: 6
end_location:
row: 25
column: 11
fix:
content: OSError
location:
row: 25
column: 6
end_location:
row: 25
column: 11
parent: ~
- kind:
OSErrorAlias: error
location:
row: 28
column: 6
end_location:
row: 28
column: 11
fix:
content: OSError
location:
row: 28
column: 6
end_location:
row: 28
column: 11
parent: ~
- kind:
OSErrorAlias: error
location:
row: 31
column: 6
end_location:
row: 31
column: 11
fix:
content: OSError
location:
row: 31
column: 6
end_location:
row: 31
column: 11
parent: ~
- kind:
OSErrorAlias: EnvironmentError
location:
row: 34
column: 6
end_location:
row: 34
column: 22
fix:
content: OSError
location:
row: 34
column: 6
end_location:
row: 34
column: 22
parent: ~
- kind:
OSErrorAlias: IOError
location:
row: 35
column: 6
end_location:
row: 35
column: 13
fix:
content: OSError
location:
row: 35
column: 6
end_location:
row: 35
column: 13
parent: ~
- kind:
OSErrorAlias: WindowsError
location:
row: 36
column: 6
end_location:
row: 36
column: 18
fix:
content: OSError
location:
row: 36
column: 6
end_location:
row: 36
column: 18
parent: ~
- kind:
OSErrorAlias: EnvironmentError
location:
row: 38
column: 6
end_location:
row: 38
column: 22
fix:
content: OSError
location:
row: 38
column: 6
end_location:
row: 38
column: 22
parent: ~
- kind:
OSErrorAlias: IOError
location:
row: 39
column: 6
end_location:
row: 39
column: 13
fix:
content: OSError
location:
row: 39
column: 6
end_location:
row: 39
column: 13
parent: ~
- kind:
OSErrorAlias: WindowsError
location:
row: 40
column: 6
end_location:
row: 40
column: 18
fix:
content: OSError
location:
row: 40
column: 6
end_location:
row: 40
column: 18
parent: ~
- kind:
OSErrorAlias: EnvironmentError
location:
row: 42
column: 6
end_location:
row: 42
column: 22
fix:
content: OSError
location:
row: 42
column: 6
end_location:
row: 42
column: 22
parent: ~
- kind:
OSErrorAlias: WindowsError
location:
row: 48
column: 6
end_location:
row: 48
column: 18
fix:
content: OSError
location:
row: 48
column: 6
end_location:
row: 48
column: 18
parent: ~
- kind:
OSErrorAlias: EnvironmentError
location:
row: 49
column: 6
end_location:
row: 49
column: 22
fix:
content: OSError
location:
row: 49
column: 6
end_location:
row: 49
column: 22
parent: ~
- kind:
OSErrorAlias: IOError
location:
row: 50
column: 6
end_location:
row: 50
column: 13
fix:
content: OSError
location:
row: 50
column: 6
end_location:
row: 50
column: 13
parent: ~


@@ -1614,10 +1614,7 @@ pub fn ambiguous_unicode_character(
 ) -> Vec<Check> {
     let mut checks = vec![];
-    let text = locator.slice_source_code_range(&Range {
-        location: start,
-        end_location: end,
-    });
+    let text = locator.slice_source_code_range(&Range::new(start, end));
     let mut col_offset = 0;
     let mut row_offset = 0;
@@ -1648,10 +1645,7 @@ pub fn ambiguous_unicode_character(
                     representant,
                 ),
             },
-            Range {
-                location,
-                end_location,
-            },
+            Range::new(location, end_location),
         );
         if settings.enabled.contains(check.kind.code()) {
             if matches!(autofix, flags::Autofix::Enabled)


@@ -125,10 +125,7 @@ fn detect_indentation(contents: &str, locator: &SourceCodeLocator) -> Option<Ind
     for (_start, tok, end) in lexer::make_tokenizer(contents).flatten() {
         if let Tok::Indent { .. } = tok {
             let start = Location::new(end.row(), 0);
-            let whitespace = locator.slice_source_code_range(&Range {
-                location: start,
-                end_location: end,
-            });
+            let whitespace = locator.slice_source_code_range(&Range::new(start, end));
             return Some(Indentation(whitespace.to_string()));
         }
     }
@@ -139,10 +136,7 @@ fn detect_indentation(contents: &str, locator: &SourceCodeLocator) -> Option<Ind
 fn detect_quote(contents: &str, locator: &SourceCodeLocator) -> Option<Quote> {
     for (start, tok, end) in lexer::make_tokenizer(contents).flatten() {
         if let Tok::String { .. } = tok {
-            let content = locator.slice_source_code_range(&Range {
-                location: start,
-                end_location: end,
-            });
+            let content = locator.slice_source_code_range(&Range::new(start, end));
             if let Some(pattern) = leading_quote(&content) {
                 if pattern.contains('\'') {
                     return Some(Quote::Single);