Mirror of https://github.com/astral-sh/ruff.git (synced 2025-08-02 18:02:23 +00:00)
Run nightly Clippy over the Ruff repo (#5670)
## Summary
This is the result of running `cargo +nightly clippy --workspace
--all-targets --all-features -- -D warnings` and fixing all violations.
Just wanted to see if there were any interesting new checks on nightly
👀
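
Most of the fixes fall into a few recurring shapes visible in the diff below: iterating a slice directly instead of calling `.iter()`, dropping redundant `.into_iter()` calls on arguments to `.chain(...)`, constructing field-less unit structs directly instead of via `::default()`, returning `*self` from `Clone` impls of `Copy` types, and removing `#` delimiters from raw strings that don't need them. The snippet below is a standalone sketch of those patterns, not code from this repository:

```rust
// Illustrative sketch only; the types and values here are made up.
struct Emitter; // a field-less unit struct

#[derive(Copy)]
struct Tag(u32);

impl Clone for Tag {
    fn clone(&self) -> Self {
        // Before: `Self(self.0)`; for a `Copy` type, just copy the value.
        *self
    }
}

fn main() {
    let names = vec!["a", "b", "c"];

    // Before: `for name in names.iter() { ... }` -- iterate the collection directly.
    for name in &names {
        println!("{name}");
    }

    // Before: `.chain(extra.into_iter())` -- `chain` already takes any `IntoIterator`.
    let extra = vec!["d", "e"];
    let all: Vec<&str> = names.iter().copied().chain(extra).collect();
    println!("{all:?}");

    // Before: `let emitter = Emitter::default();` -- construct the unit struct directly.
    let _emitter = Emitter;
    let _tag = Tag(42);

    // Before: `r#"^#\s*\d+$"#` -- no `#` needed when the literal contains no `"`.
    let _pattern = r"^#\s*\d+$";
}
```

If I recall correctly, the raw-string and unit-struct suggestions come from lints that were still nightly-only at the time, which matches the "new checks on nightly" motivation.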
This commit is contained in: parent e7e2f44440, commit 4dee49d6fa.

40 changed files with 220 additions and 470 deletions
@@ -317,10 +317,10 @@ mod tests {
 Some(TextSize::from(6))
 );

-let contents = r#"
+let contents = r"
 x = 1 \
 ; y = 1
-"#
+"
 .trim();
 let program = Suite::parse(contents, "<filename>")?;
 let stmt = program.first().unwrap();
@@ -349,10 +349,10 @@ x = 1 \
 TextSize::from(6)
 );

-let contents = r#"
+let contents = r"
 x = 1 \
 ; y = 1
-"#
+"
 .trim();
 let locator = Locator::new(contents);
 assert_eq!(
@@ -776,7 +776,7 @@ where
 pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
 }
 if self.enabled(Rule::GlobalStatement) {
-for name in names.iter() {
+for name in names {
 if let Some(asname) = name.asname.as_ref() {
 pylint::rules::global_statement(self, asname);
 } else {
@@ -972,7 +972,7 @@ where
 pycodestyle::rules::module_import_not_at_top_of_file(self, stmt, self.locator);
 }
 if self.enabled(Rule::GlobalStatement) {
-for name in names.iter() {
+for name in names {
 if let Some(asname) = name.asname.as_ref() {
 pylint::rules::global_statement(self, asname);
 } else {
@@ -1617,7 +1617,7 @@ where
 flake8_bandit::rules::assign_hardcoded_password_string(self, value, targets);
 }
 if self.enabled(Rule::GlobalStatement) {
-for target in targets.iter() {
+for target in targets {
 if let Expr::Name(ast::ExprName { id, .. }) = target {
 pylint::rules::global_statement(self, id);
 }
@@ -1749,7 +1749,7 @@ where
 }
 Stmt::Delete(ast::StmtDelete { targets, range: _ }) => {
 if self.enabled(Rule::GlobalStatement) {
-for target in targets.iter() {
+for target in targets {
 if let Expr::Name(ast::ExprName { id, .. }) = target {
 pylint::rules::global_statement(self, id);
 }
@@ -427,22 +427,22 @@ ghi
 NoqaMapping::from_iter([TextRange::new(TextSize::from(6), TextSize::from(28))])
 );

-let contents = r#"x = \
-1"#;
+let contents = r"x = \
+1";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(6))])
 );

-let contents = r#"from foo import \
+let contents = r"from foo import \
 bar as baz, \
-qux as quux"#;
+qux as quux";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([TextRange::new(TextSize::from(0), TextSize::from(36))])
 );

-let contents = r#"
+let contents = r"
 # Foo
 from foo import \
 bar as baz, \
@@ -450,7 +450,7 @@ from foo import \
 x = \
 1
 y = \
-2"#;
+2";
 assert_eq!(
 noqa_mappings(contents),
 NoqaMapping::from_iter([
@@ -51,7 +51,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = AzureEmitter::default();
+let mut emitter = AzureEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -66,7 +66,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = GithubEmitter::default();
+let mut emitter = GithubEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -108,7 +108,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = JsonEmitter::default();
+let mut emitter = JsonEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -24,14 +24,14 @@ impl Emitter for JsonLinesEmitter {

 #[cfg(test)]
 mod tests {
-use crate::message::json_lines::JsonLinesEmitter;
 use insta::assert_snapshot;

+use crate::message::json_lines::JsonLinesEmitter;
 use crate::message::tests::{capture_emitter_output, create_messages};

 #[test]
 fn output() {
-let mut emitter = JsonLinesEmitter::default();
+let mut emitter = JsonLinesEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -93,7 +93,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = JunitEmitter::default();
+let mut emitter = JunitEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
@@ -49,7 +49,7 @@ mod tests {

 #[test]
 fn output() {
-let mut emitter = PylintEmitter::default();
+let mut emitter = PylintEmitter;
 let content = capture_emitter_output(&mut emitter, &create_messages());

 assert_snapshot!(content);
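
The emitter changes above (and the matching `Printer` hunks further down) all have the same shape: `AzureEmitter`, `GithubEmitter`, `JsonEmitter`, `JsonLinesEmitter`, `JunitEmitter`, and `PylintEmitter` are field-less unit structs, so constructing them through `Default` is unnecessary. I believe the nightly lint behind this is `clippy::default_constructed_unit_structs`; in any case the mechanical rewrite looks like this (sketch with a stand-in type, not the real emitter):

```rust
// Stand-in for the real emitter types; only the construction pattern matters.
#[derive(Default)]
struct AzureEmitter;

fn main() {
    // Before: routes a field-less type through the `Default` machinery.
    let _before = AzureEmitter::default();
    // After: construct the unit struct directly.
    let _after = AzureEmitter;
}
```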
@@ -350,7 +350,7 @@ fn is_wildcard_command(expr: &Expr) -> bool {
 if let Expr::List(ast::ExprList { elts, .. }) = expr {
 let mut has_star = false;
 let mut has_command = false;
-for elt in elts.iter() {
+for elt in elts {
 if let Some(text) = string_literal(elt) {
 has_star |= text.contains('*');
 has_command |= text.contains("chown")
@@ -59,12 +59,7 @@ impl From<Options> for Settings {
 .hardcoded_tmp_directory
 .unwrap_or_else(default_tmp_dirs)
 .into_iter()
-.chain(
-options
-.hardcoded_tmp_directory_extend
-.unwrap_or_default()
-.into_iter(),
-)
+.chain(options.hardcoded_tmp_directory_extend.unwrap_or_default())
 .collect(),
 check_typed_exception: options.check_typed_exception.unwrap_or(false),
 }
@@ -351,7 +351,7 @@ pub(crate) fn reuse_of_groupby_generator(
 return;
 }
 let mut finder = GroupNameFinder::new(group_name);
-for stmt in body.iter() {
+for stmt in body {
 finder.visit_stmt(stmt);
 }
 for expr in finder.exprs {
@@ -512,7 +512,7 @@ fn pad_expression(content: String, range: TextRange, checker: &Checker) -> Strin
 // If the expression is immediately preceded by an opening brace, then
 // we need to add a space before the expression.
 let prefix = checker.locator.up_to(range.start());
-let left_pad = matches!(prefix.chars().rev().next(), Some('{'));
+let left_pad = matches!(prefix.chars().next_back(), Some('{'));

 // If the expression is immediately preceded by an opening brace, then
 // we need to add a space before the expression.
@@ -58,18 +58,18 @@ pub(crate) fn all_with_model_form(
 {
 return None;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -56,18 +56,18 @@ pub(crate) fn exclude_with_model_form(
 {
 return None;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -81,7 +81,7 @@ fn has_dunder_method(body: &[Stmt]) -> bool {
 }

 fn is_non_abstract_model(bases: &[Expr], body: &[Stmt], semantic: &SemanticModel) -> bool {
-for base in bases.iter() {
+for base in bases {
 if is_model_abstract(body) {
 continue;
 }
@@ -94,18 +94,18 @@ fn is_non_abstract_model(bases: &[Expr], body: &[Stmt], semantic: &SemanticModel

 /// Check if class is abstract, in terms of Django model inheritance.
 fn is_model_abstract(body: &[Stmt]) -> bool {
-for element in body.iter() {
+for element in body {
 let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
 continue;
 };
 if name != "Meta" {
 continue;
 }
-for element in body.iter() {
+for element in body {
 let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
 continue;
 };
-for target in targets.iter() {
+for target in targets {
 let Expr::Name(ast::ExprName { id, .. }) = target else {
 continue;
 };
@@ -53,7 +53,7 @@ impl Violation for DjangoNullableModelStringField {

 /// DJ001
 pub(crate) fn nullable_model_string_field(checker: &mut Checker, body: &[Stmt]) {
-for statement in body.iter() {
+for statement in body {
 let Stmt::Assign(ast::StmtAssign { value, .. }) = statement else {
 continue;
 };
@@ -87,7 +87,7 @@ fn is_nullable_field<'a>(checker: &'a Checker, value: &'a Expr) -> Option<&'a st
 let mut null_key = false;
 let mut blank_key = false;
 let mut unique_key = false;
-for keyword in keywords.iter() {
+for keyword in keywords {
 let Some(argument) = &keyword.arg else {
 continue;
 };
@@ -156,7 +156,7 @@ pub(crate) fn unordered_body_content_in_model(
 // Track all the element types we've seen so far.
 let mut element_types = Vec::new();
 let mut prev_element_type = None;
-for element in body.iter() {
+for element in body {
 let Some(element_type) = get_element_type(element, checker.semantic()) else {
 continue;
 };
@@ -57,12 +57,7 @@ impl From<Options> for Settings {
 .function_names
 .unwrap_or_else(default_func_names)
 .into_iter()
-.chain(
-options
-.extend_function_names
-.unwrap_or_default()
-.into_iter(),
-)
+.chain(options.extend_function_names.unwrap_or_default())
 .collect(),
 }
 }
@@ -322,7 +322,7 @@ fn strings(locator: &Locator, sequence: &[TextRange], settings: &Settings) -> Ve
 string_contents.contains(good_single(quotes_settings.inline_quotes))
 });

-for (range, trivia) in sequence.iter().zip(trivia.into_iter()) {
+for (range, trivia) in sequence.iter().zip(trivia) {
 if trivia.is_multiline {
 // If our string is or contains a known good string, ignore it.
 if trivia
@@ -227,9 +227,9 @@ impl Violation for MissingSpaceAfterTodoColon {

 static ISSUE_LINK_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| {
 RegexSet::new([
-r#"^#\s*(http|https)://.*"#, // issue link
-r#"^#\s*\d+$"#, // issue code - like "003"
-r#"^#\s*[A-Z]{1,6}\-?\d+$"#, // issue code - like "TD003"
+r"^#\s*(http|https)://.*", // issue link
+r"^#\s*\d+$", // issue code - like "003"
+r"^#\s*[A-Z]{1,6}\-?\d+$", // issue code - like "TD003"
 ])
 .unwrap()
 });
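
Several hunks (the regexes above and the raw-string test fixtures elsewhere in the diff) drop the `#` delimiters from raw strings. Hash delimiters are only needed when the literal itself contains a `"`; if I recall correctly the nightly lint is `clippy::needless_raw_string_hashes`. A quick sketch:

```rust
fn main() {
    // Before: r#"^#\s*\d+$"#  -- the hashes add nothing here.
    let issue_code = r"^#\s*\d+$";

    // Hashes are still required once the literal contains a double quote.
    let quoted = r#"say "hi""#;

    println!("{issue_code} {quoted}");
}
```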
@@ -38,7 +38,7 @@ pub(crate) fn runtime_evaluated(

 fn runtime_evaluated_base_class(base_classes: &[String], semantic: &SemanticModel) -> bool {
 if let ScopeKind::Class(ast::StmtClassDef { bases, .. }) = &semantic.scope().kind {
-for base in bases.iter() {
+for base in bases {
 if let Some(call_path) = semantic.resolve_call_path(base) {
 if base_classes
 .iter()
@@ -54,7 +54,7 @@ fn runtime_evaluated_base_class(base_classes: &[String], semantic: &SemanticMode

 fn runtime_evaluated_decorators(decorators: &[String], semantic: &SemanticModel) -> bool {
 if let ScopeKind::Class(ast::StmtClassDef { decorator_list, .. }) = &semantic.scope().kind {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 if let Some(call_path) = semantic.resolve_call_path(map_callable(&decorator.expression))
 {
 if decorators
@@ -47,7 +47,7 @@ pub(crate) fn order_imports<'a>(
 )
 .chain(
 // Include all star imports.
-block.import_from_star.into_iter(),
+block.import_from_star,
 )
 .map(
 |(
@@ -93,7 +93,7 @@ pub(crate) fn not_tests(
 if !matches!(&ops[..], [CmpOp::In | CmpOp::Is]) {
 return;
 }
-for op in ops.iter() {
+for op in ops {
 match op {
 CmpOp::In => {
 if check_not_in {
@@ -58,7 +58,7 @@ impl AlwaysAutofixableViolation for LRUCacheWithMaxsizeNone {

 /// UP033
 pub(crate) fn lru_cache_with_maxsize_none(checker: &mut Checker, decorator_list: &[Decorator]) {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 let Expr::Call(ast::ExprCall {
 func,
 args,
@@ -56,7 +56,7 @@ impl AlwaysAutofixableViolation for LRUCacheWithoutParameters {

 /// UP011
 pub(crate) fn lru_cache_without_parameters(checker: &mut Checker, decorator_list: &[Decorator]) {
-for decorator in decorator_list.iter() {
+for decorator in decorator_list {
 let Expr::Call(ast::ExprCall {
 func,
 args,
@@ -258,7 +258,7 @@ impl Configuration {
 rule_selections: config
 .rule_selections
 .into_iter()
-.chain(self.rule_selections.into_iter())
+.chain(self.rule_selections)
 .collect(),
 allowed_confusables: self.allowed_confusables.or(config.allowed_confusables),
 builtins: self.builtins.or(config.builtins),
@@ -269,17 +269,17 @@ impl Configuration {
 extend_exclude: config
 .extend_exclude
 .into_iter()
-.chain(self.extend_exclude.into_iter())
+.chain(self.extend_exclude)
 .collect(),
 extend_include: config
 .extend_include
 .into_iter()
-.chain(self.extend_include.into_iter())
+.chain(self.extend_include)
 .collect(),
 extend_per_file_ignores: config
 .extend_per_file_ignores
 .into_iter()
-.chain(self.extend_per_file_ignores.into_iter())
+.chain(self.extend_per_file_ignores)
 .collect(),
 external: self.external.or(config.external),
 fix: self.fix.or(config.fix),
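
The `Configuration` hunks above, like the earlier settings hunks, remove an `.into_iter()` on the argument passed to `.chain(...)`. `Iterator::chain` accepts any `IntoIterator`, so the explicit conversion is redundant; this is the kind of thing `clippy::useless_conversion` flags. A minimal sketch, with made-up names rather than the real `Configuration` fields:

```rust
// Merge two option lists, mirroring the shape of the `Configuration` code above.
fn merge(base: Vec<String>, extend: Vec<String>) -> Vec<String> {
    base.into_iter()
        // Before: `.chain(extend.into_iter())`
        .chain(extend)
        .collect()
}

fn main() {
    let merged = merge(vec!["E501".to_string()], vec!["F401".to_string()]);
    println!("{merged:?}");
}
```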
@@ -153,7 +153,7 @@ impl IntoIterator for OptionGroup {

 impl Display for OptionGroup {
 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-for (name, _) in self.iter() {
+for (name, _) in self {
 writeln!(f, "{name}")?;
 }
@@ -181,13 +181,13 @@ impl Printer {

 match self.format {
 SerializationFormat::Json => {
-JsonEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JsonEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::JsonLines => {
-JsonLinesEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JsonLinesEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Junit => {
-JunitEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+JunitEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Text => {
 TextEmitter::default()
@@ -222,16 +222,16 @@ impl Printer {
 self.write_summary_text(writer, diagnostics)?;
 }
 SerializationFormat::Github => {
-GithubEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+GithubEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Gitlab => {
 GitlabEmitter::default().emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Pylint => {
-PylintEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+PylintEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 SerializationFormat::Azure => {
-AzureEmitter::default().emit(writer, &diagnostics.messages, &context)?;
+AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
 }
 }
@@ -66,7 +66,7 @@ impl Fix {
 )]
 pub fn unspecified_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Unspecified,
 isolation_level: IsolationLevel::default(),
 }
@@ -84,7 +84,7 @@ impl Fix {
 /// Create a new [`Fix`] with [automatic applicability](Applicability::Automatic) from multiple [`Edit`] elements.
 pub fn automatic_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Automatic,
 isolation_level: IsolationLevel::default(),
 }
@@ -102,7 +102,7 @@ impl Fix {
 /// Create a new [`Fix`] with [suggested applicability](Applicability::Suggested) from multiple [`Edit`] elements.
 pub fn suggested_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Suggested,
 isolation_level: IsolationLevel::default(),
 }
@@ -120,7 +120,7 @@ impl Fix {
 /// Create a new [`Fix`] with [manual applicability](Applicability::Manual) from multiple [`Edit`] elements.
 pub fn manual_edits(edit: Edit, rest: impl IntoIterator<Item = Edit>) -> Self {
 Self {
-edits: std::iter::once(edit).chain(rest.into_iter()).collect(),
+edits: std::iter::once(edit).chain(rest).collect(),
 applicability: Applicability::Manual,
 isolation_level: IsolationLevel::default(),
 }
@@ -96,7 +96,7 @@ impl<Context> Copy for Arguments<'_, Context> {}

 impl<Context> Clone for Arguments<'_, Context> {
 fn clone(&self) -> Self {
-Self(self.0)
+*self
 }
 }
@@ -19,7 +19,7 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenS
 }) => {
 let mut output = vec![];

-for field in fields.named.iter() {
+for field in &fields.named {
 let docs: Vec<&Attribute> = field
 .attrs
 .iter()
@@ -201,7 +201,7 @@ if True:
 ]
 );

-let contents = r#"
+let contents = r"
 x = 1; import sys
 import os
@@ -215,7 +215,7 @@ if True:

 x = 1; \
 import os
-"#
+"
 .trim();
 let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
 let indexer = Indexer::from_tokens(lxr.as_slice(), &Locator::new(contents));
@@ -28,7 +28,7 @@ impl<'a, T> AsRef<T> for RefEquality<'a, T> {

 impl<'a, T> Clone for RefEquality<'a, T> {
 fn clone(&self) -> Self {
-Self(self.0)
+*self
 }
 }
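
The two `Clone` impls touched in this commit (`Arguments` and `RefEquality`) belong to types that are also `Copy`, and for those the canonical `clone` body is simply `*self`. I don't remember the exact lint name on that nightly (it has since gone by `non_canonical_clone_impl`), so treat the naming as approximate; the pattern itself is sketched below with a simplified stand-in type:

```rust
// Simplified stand-in for a reference-wrapping type that is `Copy`.
struct RefEquality<'a, T>(&'a T);

impl<'a, T> Copy for RefEquality<'a, T> {}

impl<'a, T> Clone for RefEquality<'a, T> {
    fn clone(&self) -> Self {
        // Before: `Self(self.0)`; copying the value is equivalent and canonical.
        *self
    }
}

fn main() {
    let value = 42;
    let a = RefEquality(&value);
    let b = a; // implicit copy
    assert!(std::ptr::eq(a.0, b.0));
}
```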
@@ -136,7 +136,7 @@ for node in nodes:
 fn format(&self) -> Self::Format<'_> {{
 FormatRefWithRule::new(
 self,
-crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node}::default(),
+crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node},
 )
 }}
 }}
@@ -149,7 +149,7 @@ for node in nodes:
 fn into_format(self) -> Self::Format {{
 FormatOwnedWithRule::new(
 self,
-crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node}::default(),
+crate::{groups[group_for_node(node)]}::{to_camel_case(node)}::Format{node},
 )
 }}
 }}
@@ -436,7 +436,7 @@ mod tests {

 let comment_ranges = comment_ranges.finish();

-let parsed = parse_tokens(tokens.into_iter(), Mode::Module, "test.py")
+let parsed = parse_tokens(tokens, Mode::Module, "test.py")
 .expect("Expect source to be valid Python");

 CommentsTestCase {

(One file's diff is not shown here because it is too large.)
@@ -26,7 +26,7 @@ impl<'ast> AsFormat<PyFormatContext<'ast>> for Mod {
 type Format<'a> = FormatRefWithRule<'a, Mod, FormatMod, PyFormatContext<'ast>>;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatMod::default())
+FormatRefWithRule::new(self, FormatMod)
 }
 }
@@ -34,6 +34,6 @@ impl<'ast> IntoFormat<PyFormatContext<'ast>> for Mod {
 type Format = FormatOwnedWithRule<Mod, FormatMod, PyFormatContext<'ast>>;

 fn into_format(self) -> Self::Format {
-FormatOwnedWithRule::new(self, FormatMod::default())
+FormatOwnedWithRule::new(self, FormatMod)
 }
 }
@@ -72,7 +72,7 @@ impl<'ast> AsFormat<PyFormatContext<'ast>> for Stmt {
 type Format<'a> = FormatRefWithRule<'a, Stmt, FormatStmt, PyFormatContext<'ast>>;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatStmt::default())
+FormatRefWithRule::new(self, FormatStmt)
 }
 }
@@ -80,6 +80,6 @@ impl<'ast> IntoFormat<PyFormatContext<'ast>> for Stmt {
 type Format = FormatOwnedWithRule<Stmt, FormatStmt, PyFormatContext<'ast>>;

 fn into_format(self) -> Self::Format {
-FormatOwnedWithRule::new(self, FormatStmt::default())
+FormatOwnedWithRule::new(self, FormatStmt)
 }
 }
@@ -124,7 +124,7 @@ impl<'def, 'ast> AsFormat<PyFormatContext<'ast>> for AnyFunctionDefinition<'def>
 > where Self: 'a;

 fn format(&self) -> Self::Format<'_> {
-FormatRefWithRule::new(self, FormatAnyFunctionDef::default())
+FormatRefWithRule::new(self, FormatAnyFunctionDef)
 }
 }