Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-26 20:10:09 +00:00)
Run nightly Clippy over the Ruff repo (#5670)
## Summary
This is the result of running `cargo +nightly clippy --workspace
--all-targets --all-features -- -D warnings` and fixing all violations.
Just wanted to see if there were any interesting new checks on nightly
👀
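
For context, most of the diff below boils down to a few recurring nightly Clippy suggestions: dropping unnecessary `#` delimiters from raw strings, iterating collections directly instead of calling `.iter()`/`.into_iter()`, passing `IntoIterator` values straight to `chain`, constructing field-less unit structs without `::default()`, and replacing `chars().rev().next()` with `next_back()`. Here is a minimal sketch of those before/after shapes; the names in it are invented for illustration and are not code from this repo:

```rust
// Hypothetical examples of the lint patterns touched by this commit;
// `UnitEmitter`, `names`, and `parts` are made-up names for the sketch.

#[derive(Default)]
struct UnitEmitter; // a field-less unit struct

fn main() {
    // Raw strings only need `#` delimiters when they contain a `"` character,
    // so `r#"..."#` can usually become `r"..."`.
    let contents = r"x = 1 \
; y = 1";

    // Iterate the collection directly instead of calling `.iter()` explicitly.
    let names = vec!["foo", "bar"];
    for name in &names {
        println!("{name}");
    }

    // `Iterator::chain` accepts any `IntoIterator`, so an extra
    // `.into_iter()` on the argument is redundant.
    let parts: Vec<i32> = vec![1, 2].into_iter().chain(vec![3, 4]).collect();

    // A unit struct can be named directly instead of calling `::default()`.
    let _emitter = UnitEmitter;

    // `next_back()` replaces the `rev().next()` idiom on double-ended iterators.
    let last = contents.chars().next_back();

    println!("{parts:?} {last:?}");
}
```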
Parent: e7e2f44440
Commit: 4dee49d6fa

40 changed files with 220 additions and 470 deletions

@@ -317,10 +317,10 @@ mod tests {
 Some(TextSize::from(6))
 );

-let contents = r#"
+let contents = r"
 x = 1 \
 ; y = 1
-"#
+"
 .trim();
 let program = Suite::parse(contents, "<filename>")?;
 let stmt = program.first().unwrap();

@@ -349,10 +349,10 @@ x = 1 \
 TextSize::from(6)
 );

-let contents = r#"
+let contents = r"
 x = 1 \
 ; y = 1
-"#
+"
 .trim();
 let locator = Locator::new(contents);
 assert_eq!(
@@ -776,7 +776,7 @@ where
 pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
 }
 if self.enabled(Rule::GlobalStatement) {
-for name in names.iter() {
+for name in names {
 if let Some(asname) = name.asname.as_ref() {
 pylint::rules::global_statement(self, asname);
 } else {

@@ -972,7 +972,7 @@ where
 pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
 }
 if self.enabled(Rule::GlobalStatement) {
-for name in names.iter() {
+for name in names {
 if let Some(asname) = name.asname.as_ref() {
 pylint::rules::global_statement(self, asname);
 } else {

@@ -1617,7 +1617,7 @@ where
 flake8_bandit::rules::assign_hardcoded_password_string(self, value, targets);
 }
 if self.enabled(Rule::GlobalStatement) {
-for target in targets.iter() {
+for target in targets {
 if let Expr::Name(ast::ExprName { id, .. }) = target {
 pylint::rules::global_statement(self, id);
 }

@@ -1749,7 +1749,7 @@ where
 }
 Stmt::Delete(ast::StmtDelete { targets, range: _ }) => {
 if self.enabled(Rule::GlobalStatement) {
-for target in targets.iter() {
+for target in targets {
 if let Expr::Name(ast::ExprName { id, .. }) = target {
 pylint::rules::global_statement(self, id);
 }
@@ -427,22 +427,22 @@ ghi
 NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(28))])
 );

-let contents = r#"x = \
-1"#;
+let contents = r"x = \
+1";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(6))])
 );

-let contents = r#"from foo import \
+let contents = r"from foo import \
 bar as baz, \
-qux as quux"#;
+qux as quux";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(36))])
 );

-let contents = r#"
+let contents = r"
 # Foo
 from foo import \
 bar as baz, \

@@ -450,7 +450,7 @@ from foo import \
 x = \
 1
 y = \
-2"#;
+2";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([
@@ -51,7 +51,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = AzureEmitter::default();
+let mut emitter = AzureEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);

@@ -66,7 +66,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = GithubEmitter::default();
+let mut emitter = GithubEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);

@@ -108,7 +108,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = JsonEmitter::default();
+let mut emitter = JsonEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -24,14 +24,14 @@ impl Emitter for JsonLinesEmitter {

 #[cfg(test)]
 mod tests {
-use crate::message::json_lines::JsonLinesEmitter;
 use insta::assert_snapshot;

+use crate::message::json_lines::JsonLinesEmitter;
 use crate::message::tests::{capture_emitter_output, create_messages};

 #[test]
 fn output() {
-let mut emitter = JsonLinesEmitter::default();
+let mut emitter = JsonLinesEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -93,7 +93,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = JunitEmitter::default();
+let mut emitter = JunitEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);

@@ -49,7 +49,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = PylintEmitter::default();
+let mut emitter = PylintEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -350,7 +350,7 @@ fn is_wildcard_command(expr: &Expr) -> bool {
 if let Expr::List(ast::ExprList { elts, .. }) = expr {
 let mut has_star = false;
 let mut has_command = false;
-for elt in elts.iter() {
+for elt in elts {
 if let Some(text) = string_literal(elt) {
 has_star |= text.contains('*');
 has_command |= text.contains("chown")
@@ -59,12 +59,7 @@ impl From<Options> for Settings {
 .hardcoded_tmp_directory
 .unwrap_or_else(default_tmp_dirs)
 .into_iter()
-.chain(
-options
-.hardcoded_tmp_directory_extend
-.unwrap_or_default()
-.into_iter(),
-)
+.chain(options.hardcoded_tmp_directory_extend.unwrap_or_default())
 .collect(),
 check_typed_exception: options.check_typed_exception.unwrap_or(false),
 }
@@ -351,7 +351,7 @@ pub(crate) fn reuse_of_groupby_generator(
 return;
 }
 let mut finder = GroupNameFinder::new(group_name);
-for stmt in body.iter() {
+for stmt in body {
 finder.visit_stmt(stmt);
 }
 for expr in finder.exprs {
@@ -512,7 +512,7 @@ fn pad_expression(content: String, range: TextRange, checker: &Checker) -> Strin
 // If the expression is immediately preceded by an opening brace, then
 // we need to add a space before the expression.
 let prefix = checker.locator.up_to(range.start());
-let left_pad = matches!(prefix.chars().rev().next(), Some('{'));
+let left_pad = matches!(prefix.chars().next_back(), Some('{'));

 // If the expression is immediately preceded by an opening brace, then
 // we need to add a space before the expression.
@@ -58,18 +58,18 @@ pub(crate) fn all_with_model_form(
 {
 return None;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -56,18 +56,18 @@ pub(crate) fn exclude_with_model_form(
 {
 return None;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -81,7 +81,7 @@ fn has_dunder_method(body: &[Stmt]) -> bool {
 }

 fn is_non_abstract_model(bases: &[Expr], body: &[Stmt], semantic: &SemanticModel) -> bool {
-for base in bases.iter() {
+for base in bases {
 if is_model_abstract(body) {
 continue;
 }

@@ -94,18 +94,18 @@ fn is_non_abstract_model(bases: &[Expr], body: &[Stmt], semantic: &SemanticModel

 /// Check if class is abstract, in terms of Django model inheritance.
 fn is_model_abstract(body: &[Stmt]) -> bool {
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -53,7 +53,7 @@ impl Violation for DjangoNullableModelStringField {

 /// DJ001
 pub(crate) fn nullable_model_string_field(checker: &mut Checker, body: &[Stmt]) {
-for statement in body.iter() {
+for statement in body {
 let Stmt::Assign(ast::StmtAssign { value, .. }) = statement else {
 continue;
 };

@@ -87,7 +87,7 @@ fn is_nullable_field<'a>(checker: &'a Checker, value: &'a Expr) -> Option<&'a st
 let mut null_key = false;
 let mut blank_key = false;
 let mut unique_key = false;
-for keyword in keywords.iter() {
+for keyword in keywords {
 let Some(argument) = &keyword.arg else {
 continue;
 };
@@ -156,7 +156,7 @@ pub(crate) fn unordered_body_content_in_model(
 // Track all the element types we've seen so far.
 let mut element_types = Vec::new();
 let mut prev_element_type = None;
-for element in body.iter() {
+for element in body {
 let Some(element_type) = get_element_type(element, checker.semantic()) else {
 continue;
 };
@@ -57,12 +57,7 @@ impl From<Options> for Settings {
 .function_names
 .unwrap_or_else(default_func_names)
 .into_iter()
-.chain(
-options
-.extend_function_names
-.unwrap_or_default()
-.into_iter(),
-)
+.chain(options.extend_function_names.unwrap_or_default())
 .collect(),
 }
 }
@@ -322,7 +322,7 @@ fn strings(locator: &Locator, sequence: &[TextRange], settings: &Settings) -> Ve
 string_contents.contains(good_single(quotes_settings.inline_quotes))
 });

-for (range, trivia) in sequence.iter().zip(trivia.into_iter()) {
+for (range, trivia) in sequence.iter().zip(trivia) {
 if trivia.is_multiline {
 // If our string is or contains a known good string, ignore it.
 if trivia
@@ -227,9 +227,9 @@ impl Violation for MissingSpaceAfterTodoColon {

 static ISSUE_LINK_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| {
 RegexSet::new([
-r#"^#\s*(http|https)://.*"#, // issue link
-r#"^#\s*\d+$"#, // issue code - like "003"
-r#"^#\s*[A-Z]{1,6}\-?\d+$"#, // issue code - like "TD003"
+r"^#\s*(http|https)://.*", // issue link
+r"^#\s*\d+$", // issue code - like "003"
+r"^#\s*[A-Z]{1,6}\-?\d+$", // issue code - like "TD003"
 ])
 .unwrap()
 });
@@ -38,7 +38,7 @@ pub(crate) fn runtime_evaluated(

 fn runtime_evaluated_base_class(base_classes: &[String], semantic: &SemanticModel) -> bool {
 if let ScopeKind::Class(ast::StmtClassDef { bases, .. }) = &semantic.scope().kind {
-for base in bases.iter() {
+for base in bases {
 if let Some(call_path) = semantic.resolve_call_path(base) {
 if base_classes
 .iter()

@@ -54,7 +54,7 @@ fn runtime_evaluated_base_class(base_classes: &[String], semantic: &SemanticMode

 fn runtime_evaluated_decorators(decorators: &[String], semantic: &SemanticModel) -> bool {
 if let ScopeKind::Class(ast::StmtClassDef { decorator_list, .. }) = &semantic.scope().kind {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 if let Some(call_path) = semantic.resolve_call_path(map_callable(&decorator.expression))
 {
 if decorators
@@ -47,7 +47,7 @@ pub(crate) fn order_imports<'a>(
 )
 .chain(
 // Include all star imports.
-block.import_from_star.into_iter(),
+block.import_from_star,
 )
 .map(
 |(
@@ -93,7 +93,7 @@ pub(crate) fn not_tests(
 if !matches!(&ops[..], [CmpOp::In | CmpOp::Is]) {
 return;
 }
-for op in ops.iter() {
+for op in ops {
 match op {
 CmpOp::In => {
 if check_not_in {
@@ -58,7 +58,7 @@ impl AlwaysAutofixableViolation for LRUCacheWithMaxsizeNone {

 /// UP033
 pub(crate) fn lru_cache_with_maxsize_none(checker: &mut Checker, decorator_list: &[Decorator]) {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 let Expr::Call(ast::ExprCall {
 func,
 args,

@@ -56,7 +56,7 @@ impl AlwaysAutofixableViolation for LRUCacheWithoutParameters {

 /// UP011
 pub(crate) fn lru_cache_without_parameters(checker: &mut Checker, decorator_list: &[Decorator]) {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 let Expr::Call(ast::ExprCall {
 func,
 args,
@@ -258,7 +258,7 @@ impl Configuration {
 rule_selections: config
 .rule_selections
 .into_iter()
-.chain(self.rule_selections.into_iter())
+.chain(self.rule_selections)
 .collect(),
 allowed_confusables: self.allowed_confusables.or(config.allowed_confusables),
 builtins: self.builtins.or(config.builtins),

@@ -269,17 +269,17 @@ impl Configuration {
 extend_exclude: config
 .extend_exclude
 .into_iter()
-.chain(self.extend_exclude.into_iter())
+.chain(self.extend_exclude)
 .collect(),
 extend_include: config
 .extend_include
 .into_iter()
-.chain(self.extend_include.into_iter())
+.chain(self.extend_include)
 .collect(),
 extend_per_file_ignores: config
 .extend_per_file_ignores
 .into_iter()
-.chain(self.extend_per_file_ignores.into_iter())
+.chain(self.extend_per_file_ignores)
 .collect(),
 external: self.external.or(config.external),
 fix: self.fix.or(config.fix),
@@ -153,7 +153,7 @@ impl IntoIterator for OptionGroup {

 impl Display for OptionGroup {
 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-for (name, _) in self.iter() {
+for (name, _) in self {
 writeln!(f, "{name}")?;
 }

@@ -181,13 +181,13 @@ impl Printer {

 match self.format {
 SerializationFormat::Json => {
-JsonEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JsonEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::JsonLines => {
-JsonLinesEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JsonLinesEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Junit => {
-JunitEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JunitEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Text => {
 TextEmitter::default()

@@ -222,16 +222,16 @@ impl Printer {
 self.write_summary_text(writer, diagnostics)?;
 }
 SerializationFormat::Github => {
-GithubEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+GithubEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Gitlab => {
 GitlabEmitter::default().emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Pylint => {
-PylintEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+PylintEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Azure => {
-AzureEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 }

@@ -66,7 +66,7 @@ impl Fix {
 )]
 pub fn unspecified_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Unspecified,
 isolation_level: IsolationLevel::default(),
 }

@@ -84,7 +84,7 @@ impl Fix {
 /// Create a new [`Fix`] with [automatic applicability](Applicability::Automatic) from multiple [`Edit`] elements.
 pub fn automatic_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Automatic,
 isolation_level: IsolationLevel::default(),
 }

@@ -102,7 +102,7 @@ impl Fix {
 /// Create a new [`Fix`] with [suggested applicability](Applicability::Suggested) from multiple [`Edit`] elements.
 pub fn suggested_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Suggested,
 isolation_level: IsolationLevel::default(),
 }

@@ -120,7 +120,7 @@ impl Fix {
 /// Create a new [`Fix`] with [manual applicability](Applicability::Manual) from multiple [`Edit`] elements.
 pub fn manual_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Manual,
 isolation_level: IsolationLevel::default(),
 }
@@ -96,7 +96,7 @@ impl<Context> Copy for Arguments<'_, Context> {}

 impl<Context> Clone for Arguments<'_, Context> {
 fn clone(&self) -> Self {
-Self(self.0)
+*self
 }
 }

@@ -19,7 +19,7 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenS
 }) => {
 let mut output = vec![];

-for field in fields.named.iter() {
+for field in &fields.named {
 let docs: Vec<&Attribute> = field
 .attrs
 .iter()
@@ -201,7 +201,7 @@ if True:
 ]
 );

-let contents = r#"
+let contents = r"
 x = 1; import sys
 import os


@@ -215,7 +215,7 @@ if True:

 x = 1; \
 import os
-"#
+"
 .trim();
 let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
 let indexer = Indexer::from_tokens(lxr.as_slice(), &Locator::new(contents));
@@ -28,7 +28,7 @@ impl<'a, T> AsRef<T> for RefEquality<'a, T> {

 impl<'a, T> Clone for RefEquality<'a, T> {
 fn clone(&self) -> Self {
-Self(self.0)
+*self
 }
 }

@@ -136,7 +136,7 @@ for node in nodes:
 fn format(&self) -> Self::Format<'_> {{
 FormatRefWithRule::new(
 self,
-crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node}::default(),
+crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node},
 )
 }}
 }}

@@ -149,7 +149,7 @@ for node in nodes:
 fn into_format(self) -> Self::Format {{
 FormatOwnedWithRule::new(
 self,
-crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node}::default(),
+crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node},
 )
 }}
 }}
@@ -436,7 +436,7 @@ mod tests {

 let comment_ranges = comment_ranges.finish();

-let parsed = parse_tokens(tokens.into_iter(), Mode::Module, "test.py")
+let parsed = parse_tokens(tokens, Mode::Module, "test.py")
 .expect("Expect source to be valid Python");

 CommentsTestCase {
(One file's diff is suppressed because it is too large.)
@@ -26,7 +26,7 @@ impl<'ast> AsFormat<PyFormatContext<'ast>> for Mod {
 type Format<'a> = FormatRefWithRule<'a, Mod, FormatMod, PyFormatContext<'ast>>;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatMod::default())
+FormatRefWithRule::new(self, FormatMod)
 }
 }


@@ -34,6 +34,6 @@ impl<'ast> IntoFormat<PyFormatContext<'ast>> for Mod {
 type Format = FormatOwnedWithRule<Mod, FormatMod, PyFormatContext<'ast>>;

 fn into_format(self) -> Self::Format {
-FormatOwnedWithRule::new(self, FormatMod::default())
+FormatOwnedWithRule::new(self, FormatMod)
 }
 }
@@ -72,7 +72,7 @@ impl<'ast> AsFormat<PyFormatContext<'ast>> for Stmt {
 type Format<'a> = FormatRefWithRule<'a, Stmt, FormatStmt, PyFormatContext<'ast>>;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatStmt::default())
+FormatRefWithRule::new(self, FormatStmt)
 }
 }


@@ -80,6 +80,6 @@ impl<'ast> IntoFormat<PyFormatContext<'ast>> for Stmt {
 type Format = FormatOwnedWithRule<Stmt, FormatStmt, PyFormatContext<'ast>>;

 fn into_format(self) -> Self::Format {
-FormatOwnedWithRule::new(self, FormatStmt::default())
+FormatOwnedWithRule::new(self, FormatStmt)
 }
 }
@@ -124,7 +124,7 @@ impl<'def, 'ast> AsFormat<PyFormatContext<'ast>> for AnyFunctionDefinition<'def>
 > where Self: 'a;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatAnyFunctionDef::default())
+FormatRefWithRule::new(self, FormatAnyFunctionDef)
 }
 }