Reduce indents (#1116)

Jeong YunWon authored on 2022-12-07 23:20:33 +09:00, committed by GitHub
commit c5451cd8ad (parent 92b9ab3010)
27 changed files with 594 additions and 561 deletions
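Every file below gets the same mechanical refactor: deeply nested `if let` / `if` chains are flattened into early-return (or early-continue) guard clauses, mostly via Rust's `let ... else` syntax, which stabilized in Rust 1.65. The sketch below illustrates the pattern on a made-up helper; it is not code from this commit, only the shape of the transformation.

// Hypothetical example, not part of this commit.
fn first_alphabetic_upper(input: Option<&str>) -> Option<char> {
    // Before the refactor, each precondition adds a level of nesting:
    //
    //     if let Some(s) = input {
    //         if let Some(c) = s.chars().next() {
    //             if c.is_alphabetic() {
    //                 return Some(c.to_ascii_uppercase());
    //             }
    //         }
    //     }
    //     None
    //
    // After the refactor, each precondition becomes a guard clause and the
    // happy path stays at the top indentation level:
    let Some(s) = input else {
        return None;
    };
    let Some(c) = s.chars().next() else {
        return None;
    };
    if !c.is_alphabetic() {
        return None;
    }
    Some(c.to_ascii_uppercase())
}

fn main() {
    assert_eq!(first_alphabetic_upper(Some("ruff")), Some('R'));
    assert_eq!(first_alphabetic_upper(Some("1x")), None);
    assert_eq!(first_alphabetic_upper(None), None);
}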


@@ -139,20 +139,21 @@ pub fn get(
         return None;
     };
-    if let Ok(encoded) = read_sync(cache_key(path, settings, autofix)) {
-        match bincode::deserialize::<CheckResult>(&encoded[..]) {
-            Ok(CheckResult {
-                metadata: CacheMetadata { mtime },
-                messages,
-            }) => {
-                if FileTime::from_last_modification_time(metadata).unix_seconds() == mtime {
-                    return Some(messages);
-                }
-            }
-            Err(e) => error!("Failed to deserialize encoded cache entry: {e:?}"),
-        }
-    };
-    None
+    let encoded = read_sync(cache_key(path, settings, autofix)).ok()?;
+    let (mtime, messages) = match bincode::deserialize::<CheckResult>(&encoded[..]) {
+        Ok(CheckResult {
+            metadata: CacheMetadata { mtime },
+            messages,
+        }) => (mtime, messages),
+        Err(e) => {
+            error!("Failed to deserialize encoded cache entry: {e:?}");
+            return None;
+        }
+    };
+    if FileTime::from_last_modification_time(metadata).unix_seconds() != mtime {
+        return None;
+    }
+    Some(messages)
 }
 
 /// Set a value in the cache.


@@ -142,15 +142,16 @@ impl<'a> Checker<'a> {
         // If we're in an f-string, override the location. RustPython doesn't produce
         // reliable locations for expressions within f-strings, so we use the
         // span of the f-string itself as a best-effort default.
-        if let Some(range) = self.in_f_string {
-            self.checks.push(Check {
-                location: range.location,
-                end_location: range.end_location,
-                ..check
-            });
-        } else {
-            self.checks.push(check);
-        }
+        let check = if let Some(range) = self.in_f_string {
+            Check {
+                location: range.location,
+                end_location: range.end_location,
+                ..check
+            }
+        } else {
+            check
+        };
+        self.checks.push(check);
     }
 
     /// Add multiple `Check` items to the `Checker`.


@@ -20,19 +20,18 @@ static URL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://\S+$").unwra
 /// Whether the given line is too long and should be reported.
 fn should_enforce_line_length(line: &str, length: usize, limit: usize) -> bool {
-    if length > limit {
-        let mut chunks = line.split_whitespace();
-        if let (Some(first), Some(_)) = (chunks.next(), chunks.next()) {
-            // Do not enforce the line length for commented lines that end with a URL
-            // or contain only a single word.
-            !(first == "#" && chunks.last().map_or(true, |c| URL_REGEX.is_match(c)))
-        } else {
-            // Single word / no printable chars - no way to make the line shorter
-            false
-        }
-    } else {
-        false
-    }
+    if length <= limit {
+        return false;
+    }
+    let mut chunks = line.split_whitespace();
+    let (Some(first), Some(_)) = (chunks.next(), chunks.next()) else {
+        // Single word / no printable chars - no way to make the line shorter
+        return false;
+    };
+    // Do not enforce the line length for commented lines that end with a URL
+    // or contain only a single word.
+    !(first == "#" && chunks.last().map_or(true, |c| URL_REGEX.is_match(c)))
 }
 
 pub fn check_lines(


@@ -93,32 +93,34 @@ pub fn extract_isort_directives(lxr: &[LexResult], locator: &SourceCodeLocator)
             continue;
         }
 
-        if matches!(tok, Tok::Comment) {
-            // TODO(charlie): Modify RustPython to include the comment text in the token.
-            let comment_text = locator.slice_source_code_range(&Range {
-                location: start,
-                end_location: end,
-            });
-            if comment_text == "# isort: split" {
-                splits.push(start.row());
-            } else if comment_text == "# isort: skip_file" {
-                skip_file = true;
-            } else if off.is_some() {
-                if comment_text == "# isort: on" {
-                    if let Some(start) = off {
-                        for row in start.row() + 1..=end.row() {
-                            exclusions.insert(row);
-                        }
-                    }
-                    off = None;
-                }
-            } else {
-                if comment_text.contains("isort: skip") {
-                    exclusions.insert(start.row());
-                } else if comment_text == "# isort: off" {
-                    off = Some(start);
-                }
-            }
-        }
+        if !matches!(tok, Tok::Comment) {
+            continue;
+        }
+
+        // TODO(charlie): Modify RustPython to include the comment text in the token.
+        let comment_text = locator.slice_source_code_range(&Range {
+            location: start,
+            end_location: end,
+        });
+
+        if comment_text == "# isort: split" {
+            splits.push(start.row());
+        } else if comment_text == "# isort: skip_file" {
+            skip_file = true;
+        } else if off.is_some() {
+            if comment_text == "# isort: on" {
+                if let Some(start) = off {
+                    for row in start.row() + 1..=end.row() {
+                        exclusions.insert(row);
+                    }
+                }
+                off = None;
+            }
+        } else {
+            if comment_text.contains("isort: skip") {
+                exclusions.insert(start.row());
+            } else if comment_text == "# isort: off" {
+                off = Some(start);
+            }
+        }
     }


@@ -25,29 +25,30 @@ impl TryFrom<&str> for CFormatSummary {
         let mut keywords = FxHashSet::default();
 
         for format_part in format_string.parts {
-            if let CFormatPart::Spec(CFormatSpec {
-                mapping_key,
-                min_field_width,
-                precision,
-                ..
-            }) = format_part.1
-            {
-                match mapping_key {
-                    Some(k) => {
-                        keywords.insert(k);
-                    }
-                    None => {
-                        num_positional += 1;
-                    }
-                };
-                if min_field_width == Some(CFormatQuantity::FromValuesTuple) {
-                    num_positional += 1;
-                    starred = true;
-                }
-                if precision == Some(CFormatQuantity::FromValuesTuple) {
-                    num_positional += 1;
-                    starred = true;
-                }
-            }
+            let CFormatPart::Spec(CFormatSpec {
+                mapping_key,
+                min_field_width,
+                precision,
+                ..
+            }) = format_part.1 else
+            {
+                continue;
+            };
+            match mapping_key {
+                Some(k) => {
+                    keywords.insert(k);
+                }
+                None => {
+                    num_positional += 1;
+                }
+            };
+            if min_field_width == Some(CFormatQuantity::FromValuesTuple) {
+                num_positional += 1;
+                starred = true;
+            }
+            if precision == Some(CFormatQuantity::FromValuesTuple) {
+                num_positional += 1;
+                starred = true;
+            }
         }


@@ -77,43 +77,39 @@ pub(crate) fn percent_format_extra_named_arguments(
     if summary.num_positional > 0 {
         return None;
     }
-    if let ExprKind::Dict { keys, values } = &right.node {
-        if values.len() > keys.len() {
-            return None; // contains **x splat
-        }
-        let missing: Vec<&String> = keys
-            .iter()
-            .filter_map(|k| match &k.node {
-                // We can only check that string literals exist
-                ExprKind::Constant {
-                    value: Constant::Str(value),
-                    ..
-                } => {
-                    if summary.keywords.contains(value) {
-                        None
-                    } else {
-                        Some(value)
-                    }
-                }
-                _ => None,
-            })
-            .collect();
-        if missing.is_empty() {
-            None
-        } else {
-            Some(Check::new(
-                CheckKind::PercentFormatExtraNamedArguments(
-                    missing.iter().map(|&s| s.clone()).collect(),
-                ),
-                location,
-            ))
-        }
-    } else {
-        None
-    }
+    let ExprKind::Dict { keys, values } = &right.node else {
+        return None;
+    };
+    if values.len() > keys.len() {
+        return None; // contains **x splat
+    }
+    let missing: Vec<&String> = keys
+        .iter()
+        .filter_map(|k| match &k.node {
+            // We can only check that string literals exist
+            ExprKind::Constant {
+                value: Constant::Str(value),
+                ..
+            } => {
+                if summary.keywords.contains(value) {
+                    None
+                } else {
+                    Some(value)
+                }
+            }
+            _ => None,
+        })
+        .collect();
+    if missing.is_empty() {
+        return None;
+    }
+    Some(Check::new(
+        CheckKind::PercentFormatExtraNamedArguments(missing.iter().map(|&s| s.clone()).collect()),
+        location,
+    ))
 }
 
 /// F505


@@ -43,30 +43,31 @@ impl TryFrom<&str> for FormatSummary {
         let mut keywords = FxHashSet::default();
 
         for format_part in format_string.format_parts {
-            if let FormatPart::Field {
-                field_name,
-                format_spec,
-                ..
-            } = format_part
-            {
-                let parsed = FieldName::parse(&field_name)?;
-                match parsed.field_type {
-                    FieldType::Auto => autos.insert(autos.len()),
-                    FieldType::Index(i) => indexes.insert(i),
-                    FieldType::Keyword(k) => keywords.insert(k),
-                };
-                let nested = FormatString::from_str(&format_spec)?;
-                for nested_part in nested.format_parts {
-                    if let FormatPart::Field { field_name, .. } = nested_part {
-                        let parsed = FieldName::parse(&field_name)?;
-                        match parsed.field_type {
-                            FieldType::Auto => autos.insert(autos.len()),
-                            FieldType::Index(i) => indexes.insert(i),
-                            FieldType::Keyword(k) => keywords.insert(k),
-                        };
-                    }
-                }
-            }
+            let FormatPart::Field {
+                field_name,
+                format_spec,
+                ..
+            } = format_part else {
+                continue;
+            };
+            let parsed = FieldName::parse(&field_name)?;
+            match parsed.field_type {
+                FieldType::Auto => autos.insert(autos.len()),
+                FieldType::Index(i) => indexes.insert(i),
+                FieldType::Keyword(k) => keywords.insert(k),
+            };
+            let nested = FormatString::from_str(&format_spec)?;
+            for nested_part in nested.format_parts {
+                let FormatPart::Field { field_name, .. } = nested_part else {
+                    continue;
+                };
+                let parsed = FieldName::parse(&field_name)?;
+                match parsed.field_type {
+                    FieldType::Auto => autos.insert(autos.len()),
+                    FieldType::Index(i) => indexes.insert(i),
+                    FieldType::Keyword(k) => keywords.insert(k),
+                };
+            }
         }


@@ -6,19 +6,22 @@ use crate::checks::{Check, CheckKind};
 /// F633
 pub fn invalid_print_syntax(checker: &mut Checker, left: &Expr) {
-    if let ExprKind::Name { id, .. } = &left.node {
-        if id == "print" {
-            let scope = checker.current_scope();
-            if let Some(Binding {
-                kind: BindingKind::Builtin,
-                ..
-            }) = scope.values.get("print")
-            {
-                checker.add_check(Check::new(
-                    CheckKind::InvalidPrintSyntax,
-                    Range::from_located(left),
-                ));
-            }
-        }
-    }
+    let ExprKind::Name { id, .. } = &left.node else {
+        return;
+    };
+    if id != "print" {
+        return;
+    }
+    let scope = checker.current_scope();
+    let Some(Binding {
+        kind: BindingKind::Builtin,
+        ..
+    }) = scope.values.get("print") else
+    {
+        return;
+    };
+    checker.add_check(Check::new(
+        CheckKind::InvalidPrintSyntax,
+        Range::from_located(left),
+    ));
 }


@@ -26,15 +26,16 @@ fn match_not_implemented(expr: &Expr) -> Option<&Expr> {
 /// F901
 pub fn raise_not_implemented(checker: &mut Checker, expr: &Expr) {
-    if let Some(expr) = match_not_implemented(expr) {
-        let mut check = Check::new(CheckKind::RaiseNotImplemented, Range::from_located(expr));
-        if checker.patch(check.kind.code()) {
-            check.amend(Fix::replacement(
-                "NotImplementedError".to_string(),
-                expr.location,
-                expr.end_location.unwrap(),
-            ));
-        }
-        checker.add_check(check);
-    }
+    let Some(expr) = match_not_implemented(expr) else {
+        return;
+    };
+    let mut check = Check::new(CheckKind::RaiseNotImplemented, Range::from_located(expr));
+    if checker.patch(check.kind.code()) {
+        check.amend(Fix::replacement(
+            "NotImplementedError".to_string(),
+            expr.location,
+            expr.end_location.unwrap(),
+        ));
+    }
+    checker.add_check(check);
 }


@@ -5,11 +5,14 @@ use crate::check_ast::Checker;
 use crate::checks::{Check, CheckKind};
 
 pub fn no_eval(checker: &mut Checker, func: &Expr) {
-    if let ExprKind::Name { id, .. } = &func.node {
-        if id == "eval" {
-            if checker.is_builtin("eval") {
-                checker.add_check(Check::new(CheckKind::NoEval, Range::from_located(func)));
-            }
-        }
-    }
+    let ExprKind::Name { id, .. } = &func.node else {
+        return;
+    };
+    if id != "eval" {
+        return;
+    }
+    if !checker.is_builtin("eval") {
+        return;
+    }
+    checker.add_check(Check::new(CheckKind::NoEval, Range::from_located(func)));
 }


@@ -20,25 +20,28 @@ pub fn consider_merging_isinstance(
     let mut obj_to_types: FxHashMap<String, (usize, FxHashSet<String>)> = FxHashMap::default();
     for value in values {
-        if let ExprKind::Call { func, args, .. } = &value.node {
-            if matches!(&func.node, ExprKind::Name { id, .. } if id == "isinstance") {
-                if let [obj, types] = &args[..] {
-                    let (num_calls, matches) = obj_to_types
-                        .entry(obj.to_string())
-                        .or_insert_with(|| (0, FxHashSet::default()));
-                    *num_calls += 1;
-                    matches.extend(match &types.node {
-                        ExprKind::Tuple { elts, .. } => {
-                            elts.iter().map(std::string::ToString::to_string).collect()
-                        }
-                        _ => {
-                            vec![types.to_string()]
-                        }
-                    });
-                }
-            }
-        }
+        let ExprKind::Call { func, args, .. } = &value.node else {
+            continue;
+        };
+        if !matches!(&func.node, ExprKind::Name { id, .. } if id == "isinstance") {
+            continue;
+        }
+        let [obj, types] = &args[..] else {
+            continue;
+        };
+        let (num_calls, matches) = obj_to_types
+            .entry(obj.to_string())
+            .or_insert_with(|| (0, FxHashSet::default()));
+        *num_calls += 1;
+        matches.extend(match &types.node {
+            ExprKind::Tuple { elts, .. } => {
+                elts.iter().map(std::string::ToString::to_string).collect()
+            }
+            _ => {
+                vec![types.to_string()]
+            }
+        });
     }
 
     for (obj, (num_calls, types)) in obj_to_types {


@@ -7,14 +7,17 @@ use crate::Check;
 /// PLR0402
 pub fn consider_using_from_import(checker: &mut Checker, alias: &Alias) {
-    if let Some(asname) = &alias.node.asname {
-        if let Some((module, name)) = alias.node.name.rsplit_once('.') {
-            if name == asname {
-                checker.add_check(Check::new(
-                    CheckKind::ConsiderUsingFromImport(module.to_string(), name.to_string()),
-                    Range::from_located(alias),
-                ));
-            }
-        }
-    }
+    let Some(asname) = &alias.node.asname else {
+        return;
+    };
+    let Some((module, name)) = alias.node.name.rsplit_once('.') else {
+        return;
+    };
+    if name != asname {
+        return;
+    }
+    checker.add_check(Check::new(
+        CheckKind::ConsiderUsingFromImport(module.to_string(), name.to_string()),
+        Range::from_located(alias),
+    ));
 }


@@ -61,27 +61,29 @@ fn get_member_import_name_alias(checker: &Checker, module: &str, member: &str) -
 /// RUF004
 pub fn consider_using_sys_exit(checker: &mut Checker, func: &Expr) {
-    if let ExprKind::Name { id, .. } = &func.node {
-        for name in ["exit", "quit"] {
-            if id == name {
-                if !(name == "exit" && is_module_star_imported(checker, "sys"))
-                    && checker.is_builtin(name)
-                {
-                    let mut check =
-                        Check::new(CheckKind::ConsiderUsingSysExit, Range::from_located(func));
-                    if checker.patch(check.kind.code()) {
-                        if let Some(content) = get_member_import_name_alias(checker, "sys", "exit")
-                        {
-                            check.amend(Fix::replacement(
-                                content,
-                                func.location,
-                                func.end_location.unwrap(),
-                            ));
-                        }
-                    }
-                    checker.add_check(check);
-                }
-            }
-        }
-    }
+    let ExprKind::Name { id, .. } = &func.node else {
+        return;
+    };
+    for name in ["exit", "quit"] {
+        if id != name {
+            continue;
+        }
+        if name == "exit" && is_module_star_imported(checker, "sys") {
+            continue;
+        }
+        if !checker.is_builtin(name) {
+            continue;
+        }
+        let mut check = Check::new(CheckKind::ConsiderUsingSysExit, Range::from_located(func));
+        if checker.patch(check.kind.code()) {
+            if let Some(content) = get_member_import_name_alias(checker, "sys", "exit") {
+                check.amend(Fix::replacement(
+                    content,
+                    func.location,
+                    func.end_location.unwrap(),
+                ));
+            }
+        }
+        checker.add_check(check);
+    }
 }


@@ -14,35 +14,43 @@ pub fn misplaced_comparison_constant(
     ops: &[Cmpop],
     comparators: &[Expr],
 ) {
-    if let ([op], [right]) = (ops, comparators) {
-        if matches!(
-            op,
-            Cmpop::Eq | Cmpop::NotEq | Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE,
-        ) && matches!(&left.node, &ExprKind::Constant { .. })
-            && !matches!(&right.node, &ExprKind::Constant { .. })
-        {
-            let reversed_op = match op {
-                Cmpop::Eq => "==",
-                Cmpop::NotEq => "!=",
-                Cmpop::Lt => ">",
-                Cmpop::LtE => ">=",
-                Cmpop::Gt => "<",
-                Cmpop::GtE => "<=",
-                _ => unreachable!("Expected comparison operator"),
-            };
-            let suggestion = format!("{right} {reversed_op} {left}");
-            let mut check = Check::new(
-                CheckKind::MisplacedComparisonConstant(suggestion.clone()),
-                Range::from_located(expr),
-            );
-            if checker.patch(check.kind.code()) {
-                check.amend(Fix::replacement(
-                    suggestion,
-                    expr.location,
-                    expr.end_location.unwrap(),
-                ));
-            }
-            checker.add_check(check);
-        }
-    }
+    let ([op], [right]) = (ops, comparators) else {
+        return;
+    };
+
+    if !matches!(
+        op,
+        Cmpop::Eq | Cmpop::NotEq | Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE,
+    ) {
+        return;
+    }
+    if !matches!(&left.node, &ExprKind::Constant { .. }) {
+        return;
+    }
+    if matches!(&right.node, &ExprKind::Constant { .. }) {
+        return;
+    }
+
+    let reversed_op = match op {
+        Cmpop::Eq => "==",
+        Cmpop::NotEq => "!=",
+        Cmpop::Lt => ">",
+        Cmpop::LtE => ">=",
+        Cmpop::Gt => "<",
+        Cmpop::GtE => "<=",
+        _ => unreachable!("Expected comparison operator"),
+    };
+    let suggestion = format!("{right} {reversed_op} {left}");
+    let mut check = Check::new(
+        CheckKind::MisplacedComparisonConstant(suggestion.clone()),
+        Range::from_located(expr),
+    );
+    if checker.patch(check.kind.code()) {
+        check.amend(Fix::replacement(
+            suggestion,
+            expr.location,
+            expr.end_location.unwrap(),
+        ));
+    }
+    checker.add_check(check);
 }


@@ -12,23 +12,24 @@ pub fn property_with_parameters(
     decorator_list: &[Expr],
     args: &Arguments,
 ) {
-    if decorator_list
-        .iter()
-        .any(|d| matches!(&d.node, ExprKind::Name { id, .. } if id == "property"))
-    {
-        if checker.is_builtin("property")
-            && args
-                .args
-                .iter()
-                .chain(args.posonlyargs.iter())
-                .chain(args.kwonlyargs.iter())
-                .count()
-                > 1
-        {
-            checker.add_check(Check::new(
-                CheckKind::PropertyWithParameters,
-                Range::from_located(stmt),
-            ));
-        }
-    }
+    if !decorator_list
+        .iter()
+        .any(|d| matches!(&d.node, ExprKind::Name { id, .. } if id == "property"))
+    {
+        return;
+    }
+    if checker.is_builtin("property")
+        && args
+            .args
+            .iter()
+            .chain(args.posonlyargs.iter())
+            .chain(args.kwonlyargs.iter())
+            .count()
+            > 1
+    {
+        checker.add_check(Check::new(
+            CheckKind::PropertyWithParameters,
+            Range::from_located(stmt),
+        ));
+    }
 }


@@ -8,17 +8,23 @@ use crate::Check;
 /// PLC0414
 pub fn useless_import_alias(checker: &mut Checker, alias: &Alias) {
-    if let Some(asname) = &alias.node.asname {
-        if !alias.node.name.contains('.') && &alias.node.name == asname {
-            let mut check = Check::new(CheckKind::UselessImportAlias, Range::from_located(alias));
-            if checker.patch(check.kind.code()) {
-                check.amend(Fix::replacement(
-                    asname.to_string(),
-                    alias.location,
-                    alias.end_location.unwrap(),
-                ));
-            }
-            checker.add_check(check);
-        }
-    }
+    let Some(asname) = &alias.node.asname else {
+        return;
+    };
+    if alias.node.name.contains('.') {
+        return;
+    }
+    if &alias.node.name != asname {
+        return;
+    }
+    let mut check = Check::new(CheckKind::UselessImportAlias, Range::from_located(alias));
+    if checker.patch(check.kind.code()) {
+        check.amend(Fix::replacement(
+            asname.to_string(),
+            alias.location,
+            alias.end_location.unwrap(),
+        ));
+    }
+    checker.add_check(check);
 }


@@ -113,26 +113,28 @@ pub fn remove_super_arguments(locator: &SourceCodeLocator, expr: &Expr) -> Optio
     let mut tree = libcst_native::parse_module(&contents, None).ok()?;
-    if let Some(Statement::Simple(body)) = tree.body.first_mut() {
-        if let Some(SmallStatement::Expr(body)) = body.body.first_mut() {
-            if let Expression::Call(body) = &mut body.value {
-                body.args = vec![];
-                body.whitespace_before_args = ParenthesizableWhitespace::default();
-                body.whitespace_after_func = ParenthesizableWhitespace::default();
-                let mut state = CodegenState::default();
-                tree.codegen(&mut state);
-                return Some(Fix::replacement(
-                    state.to_string(),
-                    range.location,
-                    range.end_location,
-                ));
-            }
-        }
-    }
-    None
+    let Statement::Simple(body) = tree.body.first_mut()? else {
+        return None;
+    };
+    let SmallStatement::Expr(body) = body.body.first_mut()? else {
+        return None;
+    };
+    let Expression::Call(body) = &mut body.value else {
+        return None;
+    };
+
+    body.args = vec![];
+    body.whitespace_before_args = ParenthesizableWhitespace::default();
+    body.whitespace_after_func = ParenthesizableWhitespace::default();
+
+    let mut state = CodegenState::default();
+    tree.codegen(&mut state);
+
+    Some(Fix::replacement(
+        state.to_string(),
+        range.location,
+        range.end_location,
+    ))
 }
 
 /// UP010


@@ -17,27 +17,27 @@ fn match_named_tuple_assign<'a>(
     targets: &'a [Expr],
     value: &'a Expr,
 ) -> Option<(&'a str, &'a [Expr], &'a [Keyword], &'a ExprKind)> {
-    if let Some(target) = targets.get(0) {
-        if let ExprKind::Name { id: typename, .. } = &target.node {
-            if let ExprKind::Call {
-                func,
-                args,
-                keywords,
-            } = &value.node
-            {
-                if match_module_member(
-                    func,
-                    "typing",
-                    "NamedTuple",
-                    &checker.from_imports,
-                    &checker.import_aliases,
-                ) {
-                    return Some((typename, args, keywords, &func.node));
-                }
-            }
-        }
-    }
-    None
+    let target = targets.get(0)?;
+    let ExprKind::Name { id: typename, .. } = &target.node else {
+        return None;
+    };
+    let ExprKind::Call {
+        func,
+        args,
+        keywords,
+    } = &value.node else {
+        return None;
+    };
+    if !match_module_member(
+        func,
+        "typing",
+        "NamedTuple",
+        &checker.from_imports,
+        &checker.import_aliases,
+    ) {
+        return None;
+    }
+    Some((typename, args, keywords, &func.node))
 }
 
 /// Generate a `StmtKind::AnnAssign` representing the provided property
@@ -78,13 +78,14 @@ fn create_property_assignment_stmt(
 /// Match the `defaults` keyword in a `NamedTuple(...)` call.
 fn match_defaults(keywords: &[Keyword]) -> Result<&[Expr]> {
-    match keywords.iter().find(|keyword| {
+    let defaults = keywords.iter().find(|keyword| {
         if let Some(arg) = &keyword.node.arg {
             arg.as_str() == "defaults"
         } else {
             false
         }
-    }) {
+    });
+    match defaults {
         Some(defaults) => match &defaults.node.value.node {
             ExprKind::List { elts, .. } => Ok(elts),
             ExprKind::Tuple { elts, .. } => Ok(elts),
@@ -96,53 +97,45 @@ fn match_defaults(keywords: &[Keyword]) -> Result<&[Expr]> {
 /// Create a list of property assignments from the `NamedTuple` arguments.
 fn create_properties_from_args(args: &[Expr], defaults: &[Expr]) -> Result<Vec<Stmt>> {
-    if let Some(fields) = args.get(1) {
-        if let ExprKind::List { elts, .. } = &fields.node {
-            let padded_defaults = if elts.len() >= defaults.len() {
-                std::iter::repeat(None)
-                    .take(elts.len() - defaults.len())
-                    .chain(defaults.iter().map(Some))
-            } else {
-                bail!("Defaults must be `None` or an iterable of at least the number of fields")
-            };
-            elts.iter()
-                .zip(padded_defaults)
-                .map(|(field, default)| {
-                    if let ExprKind::Tuple { elts, .. } = &field.node {
-                        if let [field_name, annotation] = elts.as_slice() {
-                            if let ExprKind::Constant {
-                                value: Constant::Str(property),
-                                ..
-                            } = &field_name.node
-                            {
-                                if IDENTIFIER_REGEX.is_match(property)
-                                    && !KWLIST.contains(&property.as_str())
-                                {
-                                    Ok(create_property_assignment_stmt(
-                                        property,
-                                        &annotation.node,
-                                        default.map(|d| &d.node),
-                                    ))
-                                } else {
-                                    bail!("Invalid property name: {}", property)
-                                }
-                            } else {
-                                bail!("Expected `field_name` to be `Constant::Str`")
-                            }
-                        } else {
-                            bail!("Expected `elts` to have exactly two elements")
-                        }
-                    } else {
-                        bail!("Expected `field` to be `ExprKind::Tuple`")
-                    }
-                })
-                .collect()
-        } else {
-            bail!("Expected argument to be `ExprKind::List`")
-        }
-    } else {
-        Ok(vec![])
-    }
+    let Some(fields) = args.get(1) else {
+        return Ok(vec![]);
+    };
+    let ExprKind::List { elts, .. } = &fields.node else {
+        bail!("Expected argument to be `ExprKind::List`");
+    };
+
+    let padded_defaults = if elts.len() >= defaults.len() {
+        std::iter::repeat(None)
+            .take(elts.len() - defaults.len())
+            .chain(defaults.iter().map(Some))
+    } else {
+        bail!("Defaults must be `None` or an iterable of at least the number of fields")
+    };
+    elts.iter()
+        .zip(padded_defaults)
+        .map(|(field, default)| {
+            let ExprKind::Tuple { elts, .. } = &field.node else {
+                bail!("Expected `field` to be `ExprKind::Tuple`")
+            };
+            let [field_name, annotation] = elts.as_slice() else {
+                bail!("Expected `elts` to have exactly two elements")
+            };
+            let ExprKind::Constant {
+                value: Constant::Str(property),
+                ..
+            } = &field_name.node else {
+                bail!("Expected `field_name` to be `Constant::Str`")
+            };
+            if !IDENTIFIER_REGEX.is_match(property) || KWLIST.contains(&property.as_str()) {
+                bail!("Invalid property name: {}", property)
+            }
+            Ok(create_property_assignment_stmt(
+                property,
+                &annotation.node,
+                default.map(|d| &d.node),
+            ))
+        })
+        .collect()
 }
 
 /// Generate a `StmtKind:ClassDef` statement based on the provided body and
@@ -188,26 +181,27 @@ pub fn convert_named_tuple_functional_to_class(
     targets: &[Expr],
     value: &Expr,
 ) {
-    if let Some((typename, args, keywords, base_class)) =
-        match_named_tuple_assign(checker, targets, value)
-    {
-        match match_defaults(keywords) {
-            Ok(defaults) => {
-                if let Ok(properties) = create_properties_from_args(args, defaults) {
-                    let mut check = Check::new(
-                        CheckKind::ConvertNamedTupleFunctionalToClass(typename.to_string()),
-                        Range::from_located(stmt),
-                    );
-                    if checker.patch(check.kind.code()) {
-                        match convert_to_class(stmt, typename, properties, base_class) {
-                            Ok(fix) => check.amend(fix),
-                            Err(err) => error!("Failed to convert `NamedTuple`: {err}"),
-                        }
-                    }
-                    checker.add_check(check);
-                }
-            }
-            Err(err) => error!("Failed to parse defaults: {err}"),
-        }
-    }
+    let Some((typename, args, keywords, base_class)) =
+        match_named_tuple_assign(checker, targets, value) else
+    {
+        return;
+    };
+    match match_defaults(keywords) {
+        Ok(defaults) => {
+            if let Ok(properties) = create_properties_from_args(args, defaults) {
+                let mut check = Check::new(
+                    CheckKind::ConvertNamedTupleFunctionalToClass(typename.to_string()),
+                    Range::from_located(stmt),
+                );
+                if checker.patch(check.kind.code()) {
+                    match convert_to_class(stmt, typename, properties, base_class) {
+                        Ok(fix) => check.amend(fix),
+                        Err(err) => error!("Failed to convert `NamedTuple`: {err}"),
+                    }
+                }
+                checker.add_check(check);
+            }
+        }
+        Err(err) => error!("Failed to parse defaults: {err}"),
+    }
 }


@@ -20,27 +20,27 @@ fn match_typed_dict_assign<'a>(
     targets: &'a [Expr],
     value: &'a Expr,
 ) -> Option<(&'a str, &'a [Expr], &'a [Keyword], &'a ExprKind)> {
-    if let Some(target) = targets.get(0) {
-        if let ExprKind::Name { id: class_name, .. } = &target.node {
-            if let ExprKind::Call {
-                func,
-                args,
-                keywords,
-            } = &value.node
-            {
-                if match_module_member(
-                    func,
-                    "typing",
-                    "TypedDict",
-                    &checker.from_imports,
-                    &checker.import_aliases,
-                ) {
-                    return Some((class_name, args, keywords, &func.node));
-                }
-            }
-        }
-    }
-    None
+    let target = targets.get(0)?;
+    let ExprKind::Name { id: class_name, .. } = &target.node else {
+        return None;
+    };
+    let ExprKind::Call {
+        func,
+        args,
+        keywords,
+    } = &value.node else {
+        return None;
+    };
+    if !match_module_member(
+        func,
+        "typing",
+        "TypedDict",
+        &checker.from_imports,
+        &checker.import_aliases,
+    ) {
+        return None;
+    }
+    Some((class_name, args, keywords, &func.node))
 }
 
 /// Generate a `StmtKind::AnnAssign` representing the provided property
@@ -127,15 +127,13 @@ fn get_properties_from_dict_literal(keys: &[Expr], values: &[Expr]) -> Result<Ve
 }
 
 fn get_properties_from_dict_call(func: &Expr, keywords: &[Keyword]) -> Result<Vec<Stmt>> {
-    if let ExprKind::Name { id, .. } = &func.node {
-        if id == "dict" {
-            get_properties_from_keywords(keywords)
-        } else {
-            bail!("Expected `id` to be `\"dict\"`")
-        }
-    } else {
-        bail!("Expected `func` to be `ExprKind::Name`")
-    }
+    let ExprKind::Name { id, .. } = &func.node else {
+        bail!("Expected `func` to be `ExprKind::Name`")
+    };
+    if id != "dict" {
+        bail!("Expected `id` to be `\"dict\"`")
+    }
+    get_properties_from_keywords(keywords)
 }
 
 // Deprecated in Python 3.11, removed in Python 3.13.
@@ -158,15 +156,11 @@ fn get_properties_from_keywords(keywords: &[Keyword]) -> Result<Vec<Stmt>> {
 // The only way to have the `total` keyword is to use the args version, like:
 // (`TypedDict('name', {'a': int}, total=True)`)
 fn get_total_from_only_keyword(keywords: &[Keyword]) -> Option<&KeywordData> {
-    match keywords.get(0) {
-        Some(keyword) => match &keyword.node.arg {
-            Some(arg) => match arg.as_str() {
-                "total" => Some(&keyword.node),
-                _ => None,
-            },
-            None => None,
-        },
-        None => None,
+    let keyword = keywords.get(0)?;
+    let arg = &keyword.node.arg.as_ref()?;
+    match arg.as_str() {
+        "total" => Some(&keyword.node),
+        _ => None,
     }
 }
@@ -225,24 +219,27 @@ pub fn convert_typed_dict_functional_to_class(
     targets: &[Expr],
     value: &Expr,
 ) {
-    if let Some((class_name, args, keywords, base_class)) =
-        match_typed_dict_assign(checker, targets, value)
-    {
-        match get_properties_and_total(args, keywords) {
-            Err(err) => error!("Failed to parse TypedDict: {err}"),
-            Ok((body, total_keyword)) => {
-                let mut check = Check::new(
-                    CheckKind::ConvertTypedDictFunctionalToClass(class_name.to_string()),
-                    Range::from_located(stmt),
-                );
-                if checker.patch(check.kind.code()) {
-                    match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
-                        Ok(fix) => check.amend(fix),
-                        Err(err) => error!("Failed to convert TypedDict: {err}"),
-                    };
-                }
-                checker.add_check(check);
-            }
-        }
-    }
+    let Some((class_name, args, keywords, base_class)) =
+        match_typed_dict_assign(checker, targets, value) else
+    {
+        return;
+    };
+    let (body, total_keyword) = match get_properties_and_total(args, keywords) {
+        Err(err) => {
+            error!("Failed to parse TypedDict: {err}");
+            return;
+        }
+        Ok(args) => args,
+    };
+    let mut check = Check::new(
+        CheckKind::ConvertTypedDictFunctionalToClass(class_name.to_string()),
+        Range::from_located(stmt),
+    );
+    if checker.patch(check.kind.code()) {
+        match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
+            Ok(fix) => check.amend(fix),
+            Err(err) => error!("Failed to convert TypedDict: {err}"),
+        };
+    }
+    checker.add_check(check);
 }


@@ -29,24 +29,28 @@ static DEPRECATED_ALIASES: Lazy<FxHashMap<&'static str, &'static str>> = Lazy::n
 /// UP005
 pub fn deprecated_unittest_alias(checker: &mut Checker, expr: &Expr) {
-    if let ExprKind::Attribute { value, attr, .. } = &expr.node {
-        if let Some(&target) = DEPRECATED_ALIASES.get(attr.as_str()) {
-            if let ExprKind::Name { id, .. } = &value.node {
-                if id == "self" {
-                    let mut check = Check::new(
-                        CheckKind::DeprecatedUnittestAlias(attr.to_string(), target.to_string()),
-                        Range::from_located(expr),
-                    );
-                    if checker.patch(check.kind.code()) {
-                        check.amend(Fix::replacement(
-                            format!("self.{target}"),
-                            expr.location,
-                            expr.end_location.unwrap(),
-                        ));
-                    }
-                    checker.add_check(check);
-                }
-            }
-        }
-    }
+    let ExprKind::Attribute { value, attr, .. } = &expr.node else {
+        return;
+    };
+    let Some(&target) = DEPRECATED_ALIASES.get(attr.as_str()) else {
+        return;
+    };
+    let ExprKind::Name { id, .. } = &value.node else {
+        return;
+    };
+    if id != "self" {
+        return;
+    }
+    let mut check = Check::new(
+        CheckKind::DeprecatedUnittestAlias(attr.to_string(), target.to_string()),
+        Range::from_located(expr),
+    );
+    if checker.patch(check.kind.code()) {
+        check.amend(Fix::replacement(
+            format!("self.{target}"),
+            expr.location,
+            expr.end_location.unwrap(),
+        ));
+    }
+    checker.add_check(check);
 }


@@ -9,20 +9,22 @@ use crate::pyupgrade::checks;
 pub fn super_call_with_parameters(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
     // Only bother going through the super check at all if we're in a `super` call.
     // (We check this in `check_super_args` too, so this is just an optimization.)
-    if helpers::is_super_call_with_arguments(func, args) {
-        let scope = checker.current_scope();
-        let parents: Vec<&Stmt> = checker
-            .parent_stack
-            .iter()
-            .map(|index| checker.parents[*index])
-            .collect();
-        if let Some(mut check) = checks::super_args(scope, &parents, expr, func, args) {
-            if checker.patch(check.kind.code()) {
-                if let Some(fix) = pyupgrade::fixes::remove_super_arguments(checker.locator, expr) {
-                    check.amend(fix);
-                }
-            }
-            checker.add_check(check);
-        }
-    }
+    if !helpers::is_super_call_with_arguments(func, args) {
+        return;
+    }
+    let scope = checker.current_scope();
+    let parents: Vec<&Stmt> = checker
+        .parent_stack
+        .iter()
+        .map(|index| checker.parents[*index])
+        .collect();
+    let Some(mut check) = checks::super_args(scope, &parents, expr, func, args) else {
+        return;
+    };
+    if checker.patch(check.kind.code()) {
+        if let Some(fix) = pyupgrade::fixes::remove_super_arguments(checker.locator, expr) {
+            check.amend(fix);
+        }
+    }
+    checker.add_check(check);
 }


@@ -8,16 +8,17 @@ use crate::pyupgrade::checks;
 /// UP003
 pub fn type_of_primitive(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
-    if let Some(mut check) = checks::type_of_primitive(func, args, Range::from_located(expr)) {
-        if checker.patch(check.kind.code()) {
-            if let CheckKind::TypeOfPrimitive(primitive) = &check.kind {
-                check.amend(Fix::replacement(
-                    primitive.builtin(),
-                    expr.location,
-                    expr.end_location.unwrap(),
-                ));
-            }
-        }
-        checker.add_check(check);
-    }
+    let Some(mut check) = checks::type_of_primitive(func, args, Range::from_located(expr)) else {
+        return;
+    };
+    if checker.patch(check.kind.code()) {
+        if let CheckKind::TypeOfPrimitive(primitive) = &check.kind {
+            check.amend(Fix::replacement(
+                primitive.builtin(),
+                expr.location,
+                expr.end_location.unwrap(),
+            ));
+        }
+    }
+    checker.add_check(check);
 }


@@ -9,17 +9,17 @@ use crate::source_code_locator::SourceCodeLocator;
 const UTF8_LITERALS: &[&str] = &["utf-8", "utf8", "utf_8", "u8", "utf", "cp65001"];
 
 fn match_encoded_variable(func: &Expr) -> Option<&Expr> {
-    if let ExprKind::Attribute {
-        value: variable,
-        attr,
-        ..
-    } = &func.node
-    {
-        if attr == "encode" {
-            return Some(variable);
-        }
-    }
-    None
+    let ExprKind::Attribute {
+        value: variable,
+        attr,
+        ..
+    } = &func.node else {
+        return None;
+    };
+    if attr != "encode" {
+        return None;
+    }
+    Some(variable)
 }
 
 fn is_utf8_encoding_arg(arg: &Expr) -> bool {
@@ -109,39 +109,27 @@ pub fn unnecessary_encode_utf8(
@@ -152,7 +140,20 @@ pub fn unnecessary_encode_utf8(
     args: &Vec<Expr>,
     kwargs: &Vec<Keyword>,
 ) {
-    if let Some(variable) = match_encoded_variable(func) {
-        match &variable.node {
-            ExprKind::Constant {
-                value: Constant::Str(literal),
-                ..
-            } => {
-                // "str".encode()
-                // "str".encode("utf-8")
-                if is_default_encode(args, kwargs) {
-                    if literal.is_ascii() {
-                        // "foo".encode()
-                        checker.add_check(replace_with_bytes_literal(
-                            expr,
-                            variable,
-                            checker.locator,
-                            checker.patch(&CheckCode::UP012),
-                        ));
-                    } else {
-                        // "unicode text©".encode("utf-8")
-                        if let Some(check) = delete_default_encode_arg_or_kwarg(
-                            expr,
-                            args,
-                            kwargs,
-                            checker.patch(&CheckCode::UP012),
-                        ) {
-                            checker.add_check(check);
-                        }
-                    }
-                }
-            }
-            // f"foo{bar}".encode(*args, **kwargs)
-            ExprKind::JoinedStr { .. } => {
-                if is_default_encode(args, kwargs) {
-                    if let Some(check) = delete_default_encode_arg_or_kwarg(
-                        expr,
-                        args,
-                        kwargs,
-                        checker.patch(&CheckCode::UP012),
-                    ) {
-                        checker.add_check(check);
-                    }
-                }
-            }
-            _ => {}
-        }
-    }
+    let Some(variable) = match_encoded_variable(func) else {
+        return;
+    };
+    match &variable.node {
+        ExprKind::Constant {
+            value: Constant::Str(literal),
+            ..
+        } => {
+            // "str".encode()
+            // "str".encode("utf-8")
+            if is_default_encode(args, kwargs) {
+                if literal.is_ascii() {
+                    // "foo".encode()
+                    checker.add_check(replace_with_bytes_literal(
+                        expr,
+                        variable,
+                        checker.locator,
+                        checker.patch(&CheckCode::UP012),
+                    ));
+                } else {
+                    // "unicode text©".encode("utf-8")
+                    if let Some(check) = delete_default_encode_arg_or_kwarg(
+                        expr,
+                        args,
+                        kwargs,
+                        checker.patch(&CheckCode::UP012),
+                    ) {
+                        checker.add_check(check);
+                    }
+                }
+            }
+        }
+        // f"foo{bar}".encode(*args, **kwargs)
+        ExprKind::JoinedStr { .. } => {
+            if is_default_encode(args, kwargs) {
+                if let Some(check) = delete_default_encode_arg_or_kwarg(
+                    expr,
+                    args,
+                    kwargs,
+                    checker.patch(&CheckCode::UP012),
+                ) {
+                    checker.add_check(check);
+                }
+            }
+        }
+        _ => {}
+    }
 }


@@ -49,36 +49,35 @@ pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Lo
         }
     }
-    if !removable_index.is_empty() {
-        let mut check = Check::new(
-            CheckKind::UnnecessaryFutureImport(
-                removable_names.into_iter().map(String::from).collect(),
-            ),
-            Range::from_located(stmt),
-        );
-        if checker.patch(check.kind.code()) {
-            let context = checker.binding_context();
-            let deleted: Vec<&Stmt> = checker
-                .deletions
-                .iter()
-                .map(|index| checker.parents[*index])
-                .collect();
-            match fixes::remove_unnecessary_future_import(
-                checker.locator,
-                &removable_index,
-                checker.parents[context.defined_by],
-                context.defined_in.map(|index| checker.parents[index]),
-                &deleted,
-            ) {
-                Ok(fix) => {
-                    if fix.content.is_empty() || fix.content == "pass" {
-                        checker.deletions.insert(context.defined_by);
-                    }
-                    check.amend(fix);
-                }
-                Err(e) => error!("Failed to remove __future__ import: {e}"),
-            }
-        }
-        checker.add_check(check);
-    }
+    if removable_index.is_empty() {
+        return;
+    }
+    let mut check = Check::new(
+        CheckKind::UnnecessaryFutureImport(removable_names.into_iter().map(String::from).collect()),
+        Range::from_located(stmt),
+    );
+    if checker.patch(check.kind.code()) {
+        let context = checker.binding_context();
+        let deleted: Vec<&Stmt> = checker
+            .deletions
+            .iter()
+            .map(|index| checker.parents[*index])
+            .collect();
+        match fixes::remove_unnecessary_future_import(
+            checker.locator,
+            &removable_index,
+            checker.parents[context.defined_by],
+            context.defined_in.map(|index| checker.parents[index]),
+            &deleted,
+        ) {
+            Ok(fix) => {
+                if fix.content.is_empty() || fix.content == "pass" {
+                    checker.deletions.insert(context.defined_by);
+                }
+                check.amend(fix);
+            }
+            Err(e) => error!("Failed to remove __future__ import: {e}"),
+        }
+    }
+    checker.add_check(check);
 }


@@ -6,15 +6,16 @@ use crate::pyupgrade::checks;
 /// UP011
 pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Expr]) {
-    if let Some(mut check) = checks::unnecessary_lru_cache_params(
-        decorator_list,
-        checker.settings.target_version,
-        &checker.from_imports,
-        &checker.import_aliases,
-    ) {
-        if checker.patch(check.kind.code()) {
-            check.amend(Fix::deletion(check.location, check.end_location));
-        }
-        checker.add_check(check);
-    }
+    let Some(mut check) = checks::unnecessary_lru_cache_params(
+        decorator_list,
+        checker.settings.target_version,
+        &checker.from_imports,
+        &checker.import_aliases,
+    ) else {
+        return;
+    };
+    if checker.patch(check.kind.code()) {
+        check.amend(Fix::deletion(check.location, check.end_location));
+    }
+    checker.add_check(check);
 }


@@ -8,31 +8,31 @@ use crate::pyupgrade::checks;
 /// UP001
 pub fn useless_metaclass_type(checker: &mut Checker, stmt: &Stmt, value: &Expr, targets: &[Expr]) {
-    if let Some(mut check) =
-        checks::useless_metaclass_type(targets, value, Range::from_located(stmt))
-    {
-        if checker.patch(check.kind.code()) {
-            let context = checker.binding_context();
-            let deleted: Vec<&Stmt> = checker
-                .deletions
-                .iter()
-                .map(|index| checker.parents[*index])
-                .collect();
-            match helpers::remove_stmt(
-                checker.parents[context.defined_by],
-                context.defined_in.map(|index| checker.parents[index]),
-                &deleted,
-            ) {
-                Ok(fix) => {
-                    if fix.content.is_empty() || fix.content == "pass" {
-                        checker.deletions.insert(context.defined_by);
-                    }
-                    check.amend(fix);
-                }
-                Err(e) => error!("Failed to fix remove metaclass type: {e}"),
-            }
-        }
-        checker.add_check(check);
-    }
+    let Some(mut check) =
+        checks::useless_metaclass_type(targets, value, Range::from_located(stmt)) else {
+        return;
+    };
+    if checker.patch(check.kind.code()) {
+        let context = checker.binding_context();
+        let deleted: Vec<&Stmt> = checker
+            .deletions
+            .iter()
+            .map(|index| checker.parents[*index])
+            .collect();
+        match helpers::remove_stmt(
+            checker.parents[context.defined_by],
+            context.defined_in.map(|index| checker.parents[index]),
+            &deleted,
+        ) {
+            Ok(fix) => {
+                if fix.content.is_empty() || fix.content == "pass" {
+                    checker.deletions.insert(context.defined_by);
+                }
+                check.amend(fix);
+            }
+            Err(e) => error!("Failed to fix remove metaclass type: {e}"),
+        }
+    }
+    checker.add_check(check);
 }


@@ -13,18 +13,19 @@ pub fn useless_object_inheritance(
     keywords: &[Keyword],
 ) {
     let scope = checker.current_scope();
-    if let Some(mut check) = checks::useless_object_inheritance(name, bases, scope) {
-        if checker.patch(check.kind.code()) {
-            if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
-                checker.locator,
-                stmt.location,
-                check.location,
-                bases,
-                keywords,
-            ) {
-                check.amend(fix);
-            }
-        }
-        checker.add_check(check);
-    }
+    let Some(mut check) = checks::useless_object_inheritance(name, bases, scope) else {
+        return;
+    };
+    if checker.patch(check.kind.code()) {
+        if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
+            checker.locator,
+            stmt.location,
+            check.location,
+            bases,
+            keywords,
+        ) {
+            check.amend(fix);
+        }
+    }
+    checker.add_check(check);
 }