Use nightly rustfmt with rustfmt.toml (#536)

Charlie Marsh 2022-11-01 20:34:38 -04:00 committed by GitHub
parent c68c6b5424
commit 79ca66ace5
40 changed files with 277 additions and 202 deletions
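In short, the workflow hunks below switch the fmt/clippy jobs from `stable` to a pinned `nightly-2022-11-01` toolchain (with `profile: minimal` and the relevant components), and a new `rustfmt.toml` enables unstable formatting options. A rough local equivalent of what CI now sets up — a sketch, not part of the diff itself:

```shell
rustup toolchain install nightly-2022-11-01 --profile minimal --component rustfmt
cargo +nightly-2022-11-01 fmt --all -- --check
```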

View file

@ -14,7 +14,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
toolchain: 1.63.0
- uses: actions/cache@v3
env:
cache-name: cache-cargo
@ -36,7 +37,9 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
toolchain: nightly-2022-11-01
components: rustfmt
- uses: actions/cache@v3
env:
cache-name: cache-cargo
@ -58,7 +61,9 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
toolchain: nightly-2022-11-01
components: clippy
- uses: actions/cache@v3
env:
cache-name: cache-cargo
@ -80,7 +85,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
toolchain: nightly-2022-11-01
- uses: actions/cache@v3
env:
cache-name: cache-cargo
@ -102,7 +108,8 @@ jobs:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
toolchain: nightly-2022-11-01
- uses: actions/setup-python@v4
with:
python-version: "3.10"

View file

@ -647,9 +647,14 @@ Assuming you have `cargo` installed, you can run:
```shell
cargo run resources/test/fixtures
cargo fmt
cargo clippy
cargo test
```
For development, we use [nightly Rust](https://rust-lang.github.io/rustup/concepts/channels.html#working-with-nightly-rust):
```shell
cargo +nightly fmt
cargo +nightly clippy
cargo +nightly test
```
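If the nightly toolchain or its components are not installed yet, something like the following should set them up (a sketch using standard `rustup` commands, not part of the diff):

```shell
rustup toolchain install nightly
rustup component add --toolchain nightly rustfmt clippy
```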
## Releases

View file

@ -1,7 +1,6 @@
use std::path::Path;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use ruff::fs;
use ruff::source_code_locator::compute_offsets;

View file

@ -1,12 +1,10 @@
use std::collections::HashMap;
use anyhow::Result;
use ruff::flake8_quotes;
use ruff::flake8_quotes::settings::Quote;
use ruff::pep8_naming;
use ruff::settings::options::Options;
use ruff::settings::pyproject::Pyproject;
use ruff::{flake8_quotes, pep8_naming};
use crate::parser;

View file

@ -5,7 +5,6 @@ use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use flake8_to_ruff::converter;
#[derive(Parser)]
@ -14,7 +13,8 @@ use flake8_to_ruff::converter;
long_about = None
)]
struct Cli {
/// Path to the Flake8 configuration file (e.g., 'setup.cfg', 'tox.ini', or '.flake8').
/// Path to the Flake8 configuration file (e.g., 'setup.cfg', 'tox.ini', or
/// '.flake8').
#[arg(required = true)]
file: PathBuf,
}

View file

@ -4,13 +4,13 @@ use std::str::FromStr;
use anyhow::Result;
use once_cell::sync::Lazy;
use regex::Regex;
use ruff::checks_gen::CheckCodePrefix;
use ruff::settings::types::PatternPrefixPair;
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `CheckCodePrefix` values (e.g., "F401,E501").
/// Parse a comma-separated list of `CheckCodePrefix` values (e.g.,
/// "F401,E501").
pub fn parse_prefix_codes(value: &str) -> Vec<CheckCodePrefix> {
let mut codes: Vec<CheckCodePrefix> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
@ -184,7 +184,6 @@ pub fn collect_per_file_ignores(
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff::checks_gen::CheckCodePrefix;
use ruff::settings::types::PatternPrefixPair;

View file

@ -4,9 +4,8 @@ use std::collections::{BTreeMap, BTreeSet};
use codegen::{Scope, Type, Variant};
use itertools::Itertools;
use strum::IntoEnumIterator;
use ruff::checks::CheckCode;
use strum::IntoEnumIterator;
fn main() {
// Build up a map from prefix to matching CheckCodes.

View file

@ -1,8 +1,7 @@
//! Generate a Markdown-compatible table of supported lint rules.
use strum::IntoEnumIterator;
use ruff::checks::{CheckCategory, CheckCode};
use strum::IntoEnumIterator;
fn main() {
for check_category in CheckCategory::iter() {

View file

@ -2,10 +2,9 @@ use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use rustpython_parser::parser;
use ruff::code_gen::SourceGenerator;
use ruff::fs;
use rustpython_parser::parser;
#[derive(Debug, Parser)]
struct Cli {

View file

@ -4,9 +4,8 @@ use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use rustpython_parser::parser;
use ruff::fs;
use rustpython_parser::parser;
#[derive(Debug, Parser)]
struct Cli {

View file

@ -4,9 +4,8 @@ use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use rustpython_parser::lexer;
use ruff::fs;
use rustpython_parser::lexer;
#[derive(Debug, Parser)]
struct Cli {

rustfmt.toml (new file, 15 lines)
View file

@ -0,0 +1,15 @@
condense_wildcard_suffixes = true
edition = "2021"
format_strings = true
group_imports = "StdExternalCrate"
hex_literal_case = "Lower"
imports_granularity = "Module"
max_width = 100
normalize_comments = true
normalize_doc_attributes = true
reorder_impl_items = true
reorder_imports = true
reorder_modules = true
unstable_features = true
use_field_init_shorthand = true
wrap_comments = true
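For illustration (not part of the commit), a schematic before/after of how `group_imports = "StdExternalCrate"` and `imports_granularity = "Module"` rearrange the `use` items seen throughout the hunks above, assuming rustfmt's documented behavior for these options:

```rust
// Before: ungrouped imports, one item per `use` (as in the old lines above).
//
//     use anyhow::Result;
//     use ruff::flake8_quotes;
//     use ruff::pep8_naming;
//     use std::collections::HashMap;
//     use crate::parser;
//
// After `cargo +nightly fmt`: std first, then external crates, then local
// imports, with same-module imports merged into a single `use`.
use std::collections::HashMap;

use anyhow::Result;
use ruff::{flake8_quotes, pep8_naming};

use crate::parser;
```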

View file

@ -41,7 +41,8 @@ pub fn match_name_or_attr(expr: &Expr, target: &str) -> bool {
/// Return `true` if the `Expr` is a reference to `${module}.${target}`.
///
/// Useful for, e.g., ensuring that a `Union` reference represents `typing.Union`.
/// Useful for, e.g., ensuring that a `Union` reference represents
/// `typing.Union`.
pub fn match_name_or_attr_from_module(
expr: &Expr,
target: &str,
@ -66,8 +67,8 @@ pub fn match_name_or_attr_from_module(
static DUNDER_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"__[^\s]+__").unwrap());
pub fn is_assignment_to_a_dunder(node: &StmtKind) -> bool {
// Check whether it's an assignment to a dunder, with or without a type annotation.
// This is what pycodestyle (as of 2.9.1) does.
// Check whether it's an assignment to a dunder, with or without a type
// annotation. This is what pycodestyle (as of 2.9.1) does.
match node {
StmtKind::Assign {
targets,
@ -128,7 +129,8 @@ pub fn is_super_call_with_arguments(func: &Expr, args: &[Expr]) -> bool {
}
}
/// Convert a location within a file (relative to `base`) to an absolute position.
/// Convert a location within a file (relative to `base`) to an absolute
/// position.
pub fn to_absolute(relative: &Location, base: &Location) -> Location {
if relative.row() == 1 {
Location::new(

View file

@ -8,7 +8,8 @@ fn relocate_keyword(keyword: &mut Keyword, location: Range) {
relocate_expr(&mut keyword.node.value, location);
}
/// Change an expression's location (recursively) to match a desired, fixed location.
/// Change an expression's location (recursively) to match a desired, fixed
/// location.
pub fn relocate_expr(expr: &mut Expr, location: Range) {
expr.location = location.location;
expr.end_location = Some(location.end_location);

View file

@ -85,8 +85,8 @@ pub enum BindingKind {
pub struct Binding {
pub kind: BindingKind,
pub range: Range,
/// Tuple of (scope index, range) indicating the scope and range at which the binding was
/// last used.
/// Tuple of (scope index, range) indicating the scope and range at which
/// the binding was last used.
pub used: Option<(usize, Range)>,
}

View file

@ -3,8 +3,7 @@ use std::collections::BTreeSet;
use itertools::Itertools;
use rustpython_parser::ast::Location;
use crate::autofix::Fix;
use crate::autofix::Patch;
use crate::autofix::{Fix, Patch};
use crate::checks::Check;
#[derive(Hash)]
@ -55,14 +54,15 @@ fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) ->
let mut applied: BTreeSet<&Patch> = Default::default();
for fix in fixes.sorted_by_key(|fix| fix.patch.location) {
// If we already applied an identical fix as part of another correction, skip any
// re-application.
// If we already applied an identical fix as part of another correction, skip
// any re-application.
if applied.contains(&fix.patch) {
fix.applied = true;
continue;
}
// Best-effort approach: if this fix overlaps with a fix we've already applied, skip it.
// Best-effort approach: if this fix overlaps with a fix we've already applied,
// skip it.
if last_pos > fix.patch.location {
continue;
}
@ -113,8 +113,7 @@ mod tests {
use rustpython_parser::ast::Location;
use crate::autofix::fixer::apply_fixes;
use crate::autofix::Fix;
use crate::autofix::Patch;
use crate::autofix::{Fix, Patch};
#[test]
fn empty_file() -> Result<()> {

View file

@ -4,7 +4,8 @@ use rustpython_parser::ast::{ExcepthandlerKind, Location, Stmt, StmtKind};
use crate::autofix::Fix;
/// Determine if a body contains only a single statement, taking into account deleted.
/// Determine if a body contains only a single statement, taking into account
/// deleted.
fn has_single_child(body: &[Stmt], deleted: &[&Stmt]) -> bool {
body.iter().filter(|child| !deleted.contains(child)).count() == 1
}
@ -80,7 +81,8 @@ pub fn remove_stmt(stmt: &Stmt, parent: Option<&Stmt>, deleted: &[&Stmt]) -> Res
))
} else {
// Otherwise, nuke the entire line.
// TODO(charlie): This logic assumes that there are no multi-statement physical lines.
// TODO(charlie): This logic assumes that there are no multi-statement physical
// lines.
Ok(Fix::deletion(
Location::new(stmt.location.row(), 0),
Location::new(stmt.end_location.unwrap().row() + 1, 0),

View file

@ -1,4 +1,5 @@
// cacache uses async-std which has no wasm support, so currently no caching support on wasm
// cacache uses async-std which has no wasm support, so currently no caching
// support on wasm
#![cfg_attr(
target_family = "wasm",
allow(unused_imports, unused_variables, dead_code)
@ -127,7 +128,7 @@ pub fn get(
}
Err(e) => error!("Failed to deserialize encoded cache entry: {e:?}"),
},
Err(EntryNotFound(_, _)) => {}
Err(EntryNotFound(..)) => {}
Err(e) => error!("Failed to read from cache: {e:?}"),
}
None

View file

@ -123,7 +123,8 @@ impl<'a> Checker<'a> {
}
}
/// Return `true` if a patch should be generated under the given autofix `Mode`.
/// Return `true` if a patch should be generated under the given autofix
/// `Mode`.
pub fn patch(&self) -> bool {
self.autofix.patch()
}
@ -287,7 +288,8 @@ where
self.check_builtin_shadowing(name, Range::from_located(stmt), true);
// Visit the decorators and arguments, but avoid the body, which will be deferred.
// Visit the decorators and arguments, but avoid the body, which will be
// deferred.
for expr in decorator_list {
self.visit_expr(expr);
}
@ -1540,7 +1542,8 @@ where
flake8_bugbear::plugins::mutable_argument_default(self, arguments)
}
// Bind, but intentionally avoid walking default expressions, as we handle them upstream.
// Bind, but intentionally avoid walking default expressions, as we handle them
// upstream.
for arg in &arguments.posonlyargs {
self.visit_arg(arg);
}
@ -1559,7 +1562,8 @@ where
}
fn visit_arg(&mut self, arg: &'b Arg) {
// Bind, but intentionally avoid walking the annotation, as we handle it upstream.
// Bind, but intentionally avoid walking the annotation, as we handle it
// upstream.
self.add_binding(
arg.node.arg.to_string(),
Binding {
@ -1601,8 +1605,9 @@ fn try_mark_used(scope: &mut Scope, scope_id: usize, id: &str, expr: &Expr) -> b
// Mark the binding as used.
binding.used = Some((scope_id, Range::from_located(expr)));
// If the name of the sub-importation is the same as an alias of another importation and the
// alias is used, that sub-importation should be marked as used too.
// If the name of the sub-importation is the same as an alias of another
// importation and the alias is used, that sub-importation should be
// marked as used too.
//
// This handles code like:
// import pyarrow as pa
@ -1712,7 +1717,8 @@ impl<'a> Checker<'a> {
fn add_binding(&mut self, name: String, binding: Binding) {
let scope = &mut self.scopes[*(self.scope_stack.last().expect("No current scope found."))];
// TODO(charlie): Don't treat annotations as assignments if there is an existing value.
// TODO(charlie): Don't treat annotations as assignments if there is an existing
// value.
let binding = match scope.values.get(&name) {
None => binding,
Some(existing) => {

View file

@ -5,8 +5,7 @@ use std::collections::BTreeMap;
use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::autofix::fixer;
use crate::autofix::Fix;
use crate::autofix::{fixer, Fix};
use crate::checks::{Check, CheckCode, CheckKind};
use crate::noqa;
use crate::noqa::Directive;
@ -68,7 +67,7 @@ pub fn check_lines(
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
match noqa {
(Directive::All(_, _), matches) => {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
ignored.push(index)
}
@ -100,7 +99,7 @@ pub fn check_lines(
);
match noqa {
(Directive::All(_, _), matches) => {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
@ -118,8 +117,8 @@ pub fn check_lines(
// Enforce newlines at end of files.
if settings.enabled.contains(&CheckCode::W292) && !contents.ends_with('\n') {
// Note: if `lines.last()` is `None`, then `contents` is empty (and so we don't want to
// raise W292 anyway).
// Note: if `lines.last()` is `None`, then `contents` is empty (and so we don't
// want to raise W292 anyway).
if let Some(line) = lines.last() {
let lineno = lines.len() - 1;
let noqa_lineno = noqa_line_for
@ -140,7 +139,7 @@ pub fn check_lines(
);
match noqa {
(Directive::All(_, _), matches) => {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
@ -227,12 +226,11 @@ pub fn check_lines(
#[cfg(test)]
mod tests {
use super::check_lines;
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::settings::Settings;
use super::check_lines;
#[test]
fn e501_non_ascii_char() {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.

View file

@ -395,7 +395,8 @@ pub enum CheckKind {
}
impl CheckCode {
/// The source for the check (either the AST, the filesystem, or the physical lines).
/// The source for the check (either the AST, the filesystem, or the
/// physical lines).
pub fn lint_source(&self) -> &'static LintSource {
match self {
CheckCode::E501 | CheckCode::W292 | CheckCode::M001 => &LintSource::Lines,
@ -779,14 +780,14 @@ impl CheckKind {
CheckKind::FutureFeatureNotDefined(_) => &CheckCode::F407,
CheckKind::IOError(_) => &CheckCode::E902,
CheckKind::IfTuple => &CheckCode::F634,
CheckKind::ImportShadowedByLoopVar(_, _) => &CheckCode::F402,
CheckKind::ImportShadowedByLoopVar(..) => &CheckCode::F402,
CheckKind::ImportStarNotPermitted(_) => &CheckCode::F406,
CheckKind::ImportStarUsage(_, _) => &CheckCode::F405,
CheckKind::ImportStarUsage(..) => &CheckCode::F405,
CheckKind::ImportStarUsed(_) => &CheckCode::F403,
CheckKind::InvalidPrintSyntax => &CheckCode::F633,
CheckKind::IsLiteral => &CheckCode::F632,
CheckKind::LateFutureImport => &CheckCode::F404,
CheckKind::LineTooLong(_, _) => &CheckCode::E501,
CheckKind::LineTooLong(..) => &CheckCode::E501,
CheckKind::ModuleImportNotAtTopOfFile => &CheckCode::E402,
CheckKind::MultiValueRepeatedKeyLiteral => &CheckCode::F601,
CheckKind::MultiValueRepeatedKeyVariable(_) => &CheckCode::F602,
@ -797,13 +798,13 @@ impl CheckKind {
CheckKind::ReturnOutsideFunction => &CheckCode::F706,
CheckKind::SyntaxError(_) => &CheckCode::E999,
CheckKind::ExpressionsInStarAssignment => &CheckCode::F621,
CheckKind::TrueFalseComparison(_, _) => &CheckCode::E712,
CheckKind::TrueFalseComparison(..) => &CheckCode::E712,
CheckKind::TwoStarredExpressions => &CheckCode::F622,
CheckKind::TypeComparison => &CheckCode::E721,
CheckKind::UndefinedExport(_) => &CheckCode::F822,
CheckKind::UndefinedLocal(_) => &CheckCode::F823,
CheckKind::UndefinedName(_) => &CheckCode::F821,
CheckKind::UnusedImport(_, _) => &CheckCode::F401,
CheckKind::UnusedImport(..) => &CheckCode::F401,
CheckKind::UnusedVariable(_) => &CheckCode::F841,
CheckKind::YieldOutsideFunction => &CheckCode::F704,
// pycodestyle warnings
@ -851,7 +852,7 @@ impl CheckKind {
CheckKind::TypeOfPrimitive(_) => &CheckCode::U003,
CheckKind::UnnecessaryAbspath => &CheckCode::U002,
CheckKind::UselessMetaclassType => &CheckCode::U001,
CheckKind::DeprecatedUnittestAlias(_, _) => &CheckCode::U005,
CheckKind::DeprecatedUnittestAlias(..) => &CheckCode::U005,
CheckKind::UsePEP585Annotation(_) => &CheckCode::U006,
CheckKind::UsePEP604Annotation => &CheckCode::U007,
CheckKind::UselessObjectInheritance(_) => &CheckCode::U004,
@ -1059,7 +1060,9 @@ impl CheckKind {
}
// pycodestyle warnings
CheckKind::NoNewLineAtEndOfFile => "No newline at end of file".to_string(),
CheckKind::InvalidEscapeSequence(char) => format!("Invalid escape sequence: '\\{char}'"),
CheckKind::InvalidEscapeSequence(char) => {
format!("Invalid escape sequence: '\\{char}'")
}
// flake8-builtins
CheckKind::BuiltinVariableShadowing(name) => {
format!("Variable `{name}` is shadowing a python builtin")
@ -1071,15 +1074,25 @@ impl CheckKind {
format!("Class attribute `{name}` is shadowing a python builtin")
}
// flake8-bugbear
CheckKind::UnaryPrefixIncrement => "Python does not support the unary prefix increment. Writing `++n` is equivalent to `+(+(n))`, which equals `n`. You meant `n += 1`.".to_string(),
CheckKind::MutableArgumentDefault => "Do not use mutable data structures for argument defaults.".to_string(),
CheckKind::UnusedLoopControlVariable(name) => format!("Loop control variable `{name}` not used within the loop body. If this is intended, start the name with an underscore."),
CheckKind::DoNotAssertFalse => {
"Do not `assert False` (`python -O` removes these calls), raise `AssertionError()`"
.to_string()
CheckKind::UnaryPrefixIncrement => "Python does not support the unary prefix \
increment. Writing `++n` is equivalent to \
`+(+(n))`, which equals `n`. You meant `n += 1`."
.to_string(),
CheckKind::MutableArgumentDefault => {
"Do not use mutable data structures for argument defaults.".to_string()
}
CheckKind::UnusedLoopControlVariable(name) => format!(
"Loop control variable `{name}` not used within the loop body. If this is \
intended, start the name with an underscore."
),
CheckKind::DoNotAssertFalse => "Do not `assert False` (`python -O` removes these \
calls), raise `AssertionError()`"
.to_string(),
CheckKind::RedundantTupleInExceptionHandler(name) => {
format!("A length-one tuple literal is redundant. Write `except {name}:` instead of `except ({name},):`.")
format!(
"A length-one tuple literal is redundant. Write `except {name}:` instead of \
`except ({name},):`."
)
}
CheckKind::DuplicateHandlerException(names) => {
if names.len() == 1 {
@ -1091,7 +1104,11 @@ impl CheckKind {
}
}
CheckKind::NoAssertRaisesException => {
"`assertRaises(Exception):` should be considered evil. It can lead to your test passing even if the code being tested is never executed due to a typo. Either assert for a more specific exception (builtin or custom), use `assertRaisesRegex`, or use the context manager form of `assertRaises`.".to_string()
"`assertRaises(Exception):` should be considered evil. It can lead to your test \
passing even if the code being tested is never executed due to a typo. Either \
assert for a more specific exception (builtin or custom), use \
`assertRaisesRegex`, or use the context manager form of `assertRaises`."
.to_string()
}
CheckKind::DuplicateTryBlockException(name) => {
format!("try-except block with duplicate exception `{name}`")
@ -1124,22 +1141,26 @@ impl CheckKind {
CheckKind::UnnecessaryLiteralWithinTupleCall(literal) => {
if literal == "list" {
format!(
"Unnecessary `{literal}` literal passed to `tuple()` (rewrite as a `tuple` literal)"
"Unnecessary `{literal}` literal passed to `tuple()` (rewrite as a \
`tuple` literal)"
)
} else {
format!(
"Unnecessary `{literal}` literal passed to `tuple()` (remove the outer call to `tuple()`)"
"Unnecessary `{literal}` literal passed to `tuple()` (remove the outer \
call to `tuple()`)"
)
}
}
CheckKind::UnnecessaryLiteralWithinListCall(literal) => {
if literal == "list" {
format!(
"Unnecessary `{literal}` literal passed to `list()` (remove the outer call to `list()`)"
"Unnecessary `{literal}` literal passed to `list()` (remove the outer \
call to `list()`)"
)
} else {
format!(
"Unnecessary `{literal}` literal passed to `list()` (rewrite as a `list` literal)"
"Unnecessary `{literal}` literal passed to `list()` (rewrite as a `list` \
literal)"
)
}
}
@ -1169,25 +1190,29 @@ impl CheckKind {
CheckKind::PrintFound => "`print` found".to_string(),
CheckKind::PPrintFound => "`pprint` found".to_string(),
// flake8-quotes
CheckKind::BadQuotesInlineString(quote) => {
match quote {
CheckKind::BadQuotesInlineString(quote) => match quote {
Quote::Single => "Double quotes found but single quotes preferred".to_string(),
Quote::Double => "Single quotes found but double quotes preferred".to_string(),
},
CheckKind::BadQuotesMultilineString(quote) => match quote {
Quote::Single => {
"Double quote multiline found but single quotes preferred".to_string()
}
Quote::Double => {
"Single quote multiline found but double quotes preferred".to_string()
}
},
CheckKind::BadQuotesMultilineString(quote) => {
match quote {
Quote::Single => "Double quote multiline found but single quotes preferred".to_string(),
Quote::Double => "Single quote multiline found but double quotes preferred".to_string(),
CheckKind::BadQuotesDocstring(quote) => match quote {
Quote::Single => {
"Double quote docstring found but single quotes preferred".to_string()
}
Quote::Double => {
"Single quote docstring found but double quotes preferred".to_string()
}
},
CheckKind::BadQuotesDocstring(quote) => {
match quote {
Quote::Single => "Double quote docstring found but single quotes preferred".to_string(),
Quote::Double => "Single quote docstring found but double quotes preferred".to_string(),
CheckKind::AvoidQuoteEscape => {
"Change outer quotes to avoid escaping inner quotes".to_string()
}
},
CheckKind::AvoidQuoteEscape => "Change outer quotes to avoid escaping inner quotes".to_string(),
// pyupgrade
CheckKind::TypeOfPrimitive(primitive) => {
format!("Use `{}` instead of `type(...)`", primitive.builtin())
@ -1226,10 +1251,9 @@ impl CheckKind {
}
CheckKind::EndsInPeriod => "First line should end with a period".to_string(),
CheckKind::NonEmpty => "Docstring is empty".to_string(),
CheckKind::EndsInPunctuation => {
"First line should end with a period, question mark, or exclamation point"
.to_string()
}
CheckKind::EndsInPunctuation => "First line should end with a period, question mark, \
or exclamation point"
.to_string(),
CheckKind::FirstLineCapitalized => {
"First word of the first line should be properly capitalized".to_string()
}
@ -1291,7 +1315,10 @@ impl CheckKind {
format!("Missing dashed underline after section (\"{name}\")")
}
CheckKind::SectionUnderlineAfterName(name) => {
format!("Section underline should be in the line following the section's name (\"{name}\")")
format!(
"Section underline should be in the line following the section's name \
(\"{name}\")"
)
}
CheckKind::SectionUnderlineMatchesSectionLength(name) => {
format!("Section underline should match the length of its name (\"{name}\")")
@ -1391,7 +1418,8 @@ impl CheckKind {
}
}
/// The summary text for the check. Typically a truncated form of the body text.
/// The summary text for the check. Typically a truncated form of the body
/// text.
pub fn summary(&self) -> String {
match self {
CheckKind::UnaryPrefixIncrement => {

View file

@ -270,12 +270,14 @@ impl CheckCodePrefix {
CheckCodePrefix::B002 => vec![CheckCode::B002],
CheckCodePrefix::B006 => vec![CheckCode::B006],
CheckCodePrefix::B007 => vec![CheckCode::B007],
CheckCodePrefix::B01 => vec![
CheckCodePrefix::B01 => {
vec![
CheckCode::B011,
CheckCode::B013,
CheckCode::B014,
CheckCode::B017,
],
]
}
CheckCodePrefix::B011 => vec![CheckCode::B011],
CheckCodePrefix::B013 => vec![CheckCode::B013],
CheckCodePrefix::B014 => vec![CheckCode::B014],
@ -582,12 +584,14 @@ impl CheckCodePrefix {
CheckCode::E742,
CheckCode::E743,
],
CheckCodePrefix::E71 => vec![
CheckCodePrefix::E71 => {
vec![
CheckCode::E711,
CheckCode::E712,
CheckCode::E713,
CheckCode::E714,
],
]
}
CheckCodePrefix::E711 => vec![CheckCode::E711],
CheckCodePrefix::E712 => vec![CheckCode::E712],
CheckCodePrefix::E713 => vec![CheckCode::E713],
@ -680,12 +684,14 @@ impl CheckCodePrefix {
CheckCodePrefix::F62 => vec![CheckCode::F621, CheckCode::F622],
CheckCodePrefix::F621 => vec![CheckCode::F621],
CheckCodePrefix::F622 => vec![CheckCode::F622],
CheckCodePrefix::F63 => vec![
CheckCodePrefix::F63 => {
vec![
CheckCode::F631,
CheckCode::F632,
CheckCode::F633,
CheckCode::F634,
],
]
}
CheckCodePrefix::F631 => vec![CheckCode::F631],
CheckCodePrefix::F632 => vec![CheckCode::F632],
CheckCodePrefix::F633 => vec![CheckCode::F633],
@ -802,24 +808,30 @@ impl CheckCodePrefix {
CheckCodePrefix::N816 => vec![CheckCode::N816],
CheckCodePrefix::N817 => vec![CheckCode::N817],
CheckCodePrefix::N818 => vec![CheckCode::N818],
CheckCodePrefix::Q => vec![
CheckCodePrefix::Q => {
vec![
CheckCode::Q000,
CheckCode::Q001,
CheckCode::Q002,
CheckCode::Q003,
],
CheckCodePrefix::Q0 => vec![
]
}
CheckCodePrefix::Q0 => {
vec![
CheckCode::Q000,
CheckCode::Q001,
CheckCode::Q002,
CheckCode::Q003,
],
CheckCodePrefix::Q00 => vec![
]
}
CheckCodePrefix::Q00 => {
vec![
CheckCode::Q000,
CheckCode::Q001,
CheckCode::Q002,
CheckCode::Q003,
],
]
}
CheckCodePrefix::Q000 => vec![CheckCode::Q000],
CheckCodePrefix::Q001 => vec![CheckCode::Q001],
CheckCodePrefix::Q002 => vec![CheckCode::Q002],

View file

@ -9,8 +9,7 @@ use regex::Regex;
use crate::checks_gen::CheckCodePrefix;
use crate::printer::SerializationFormat;
use crate::settings::configuration::Configuration;
use crate::settings::types::PatternPrefixPair;
use crate::settings::types::PythonVersion;
use crate::settings::types::{PatternPrefixPair, PythonVersion};
#[derive(Debug, Parser)]
#[command(author, about = "ruff: An extremely fast Python linter.")]
@ -27,7 +26,8 @@ pub struct Cli {
/// Only log errors.
#[arg(short, long, group = "verbosity")]
pub quiet: bool,
/// Disable all logging (but still exit with status code "1" upon detecting errors).
/// Disable all logging (but still exit with status code "1" upon detecting
/// errors).
#[arg(short, long, group = "verbosity")]
pub silent: bool,
/// Exit with status code "0", even upon detecting errors.
@ -45,19 +45,22 @@ pub struct Cli {
/// List of error codes to enable.
#[arg(long, value_delimiter = ',')]
pub select: Vec<CheckCodePrefix>,
/// Like --select, but adds additional error codes on top of the selected ones.
/// Like --select, but adds additional error codes on top of the selected
/// ones.
#[arg(long, value_delimiter = ',')]
pub extend_select: Vec<CheckCodePrefix>,
/// List of error codes to ignore.
#[arg(long, value_delimiter = ',')]
pub ignore: Vec<CheckCodePrefix>,
/// Like --ignore, but adds additional error codes on top of the ignored ones.
/// Like --ignore, but adds additional error codes on top of the ignored
/// ones.
#[arg(long, value_delimiter = ',')]
pub extend_ignore: Vec<CheckCodePrefix>,
/// List of paths, used to exclude files and/or directories from checks.
#[arg(long, value_delimiter = ',')]
pub exclude: Vec<String>,
/// Like --exclude, but adds additional files and directories on top of the excluded ones.
/// Like --exclude, but adds additional files and directories on top of the
/// excluded ones.
#[arg(long, value_delimiter = ',')]
pub extend_exclude: Vec<String>,
/// List of mappings from file pattern to code to exclude
@ -139,7 +142,10 @@ pub fn warn_on(
path.to_string_lossy()
)
} else {
warn!("{code:?} was passed to {flag}, but ignored by the default `extend_ignore` field")
warn!(
"{code:?} was passed to {flag}, but ignored by the default `extend_ignore` \
field"
)
}
}
}

View file

@ -1,7 +1,6 @@
pub use assert_false::assert_false;
pub use assert_raises_exception::assert_raises_exception;
pub use duplicate_exceptions::duplicate_exceptions;
pub use duplicate_exceptions::duplicate_handler_exceptions;
pub use duplicate_exceptions::{duplicate_exceptions, duplicate_handler_exceptions};
pub use mutable_argument_default::mutable_argument_default;
pub use redundant_tuple_in_exception_handler::redundant_tuple_in_exception_handler;
pub use unary_prefix_increment::unary_prefix_increment;

View file

@ -148,8 +148,7 @@ mod tests {
use crate::checks::{Check, CheckCode};
use crate::flake8_quotes::settings::Quote;
use crate::linter::tokenize;
use crate::{flake8_quotes, linter, Settings};
use crate::{fs, noqa};
use crate::{flake8_quotes, fs, linter, noqa, Settings};
fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
let contents = fs::read_file(path)?;

View file

@ -10,7 +10,8 @@ use rustpython_parser::lexer::Tok;
enum State {
// Start of the module: first string gets marked as a docstring.
ExpectModuleDocstring,
// After seeing a class definition, we're waiting for the block colon (and do bracket counting).
// After seeing a class definition, we're waiting for the block colon (and do bracket
// counting).
ExpectClassColon,
// After seeing the block colon in a class definition, we expect a docstring.
ExpectClassDocstring,

View file

@ -7,8 +7,7 @@ use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
use log::debug;
use path_absolutize::path_dedot;
use path_absolutize::Absolutize;
use path_absolutize::{path_dedot, Absolutize};
use walkdir::{DirEntry, WalkDir};
use crate::checks::CheckCode;
@ -137,7 +136,8 @@ pub fn ignores_from_path<'a>(
.collect())
}
/// Convert any path to an absolute path (based on the current working directory).
/// Convert any path to an absolute path (based on the current working
/// directory).
pub fn normalize_path(path: &Path) -> PathBuf {
if let Ok(path) = path.absolutize() {
return path.to_path_buf();

View file

@ -5,9 +5,7 @@ use std::path::Path;
use anyhow::Result;
use log::debug;
use rustpython_parser::lexer::LexResult;
use settings::pyproject;
use settings::Settings;
use settings::{pyproject, Settings};
use crate::autofix::fixer::Mode;
use crate::checks::Check;

View file

@ -243,9 +243,7 @@ mod tests {
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::linter::tokenize;
use crate::settings;
use crate::{fs, noqa};
use crate::{linter, Settings};
use crate::{fs, linter, noqa, settings, Settings};
fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
let contents = fs::read_file(path)?;

View file

@ -12,36 +12,32 @@ use log::{debug, error};
use notify::{raw_watcher, RecursiveMode, Watcher};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use walkdir::DirEntry;
#[cfg(not(target_family = "wasm"))]
use ruff::cache;
use ruff::checks::CheckCode;
use ruff::checks::CheckKind;
use ruff::checks::{CheckCode, CheckKind};
use ruff::checks_gen::CheckCodePrefix;
use ruff::cli::{collect_per_file_ignores, warn_on, Cli, Warnable};
use ruff::fs::iter_python_files;
use ruff::linter::add_noqa_to_path;
use ruff::linter::autoformat_path;
use ruff::linter::{lint_path, lint_stdin};
use ruff::linter::{add_noqa_to_path, autoformat_path, lint_path, lint_stdin};
use ruff::logging::set_up_logging;
use ruff::message::Message;
use ruff::printer::{Printer, SerializationFormat};
use ruff::settings::configuration::Configuration;
use ruff::settings::pyproject;
use ruff::settings::types::FilePattern;
use ruff::settings::user::UserConfiguration;
use ruff::settings::Settings;
use ruff::settings::{pyproject, Settings};
use ruff::tell_user;
use walkdir::DirEntry;
#[cfg(feature = "update-informer")]
const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
#[cfg(feature = "update-informer")]
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
/// Shim that calls par_iter except for wasm because there's no wasm support in rayon yet
/// (there is a shim to be used for the web, but it requires js cooperation)
/// Unfortunately, ParallelIterator does not implement Iterator so the signatures diverge
/// Shim that calls par_iter except for wasm because there's no wasm support in
/// rayon yet (there is a shim to be used for the web, but it requires js
/// cooperation) Unfortunately, ParallelIterator does not implement Iterator so
/// the signatures diverge
#[cfg(not(target_family = "wasm"))]
fn par_iter<T: Sync>(iterable: &Vec<T>) -> impl ParallelIterator<Item = &T> {
iterable.par_iter()
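For context (not shown in this hunk), the wasm fallback that the reworked comment describes presumably looks something like the following sketch: without rayon, it returns a plain sequential iterator, which is why the signatures diverge.

```rust
// Hypothetical wasm counterpart (an assumption, not part of the diff):
// rayon's ParallelIterator is unavailable, so fall back to a sequential iterator.
#[cfg(target_family = "wasm")]
fn par_iter<T: Sync>(iterable: &Vec<T>) -> impl Iterator<Item = &T> {
    iterable.iter()
}
```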

View file

@ -60,7 +60,8 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
min_line = min(min_line, start.row());
max_line = max(max_line, start.row());
// For now, we only care about preserving noqa directives across multi-line strings.
// For now, we only care about preserving noqa directives across multi-line
// strings.
if in_string {
for i in (noqa_line_for.len())..(min_line - 1) {
noqa_line_for.push(i + 1);
@ -123,7 +124,7 @@ fn add_noqa_inner(
output.push_str(line);
}
Directive::All(start, _) => output.push_str(&line[..start]),
Directive::Codes(start, _, _) => output.push_str(&line[..start]),
Directive::Codes(start, ..) => output.push_str(&line[..start]),
};
let codes: Vec<&str> = codes.iter().map(|code| code.as_ref()).collect();
output.push_str(" # noqa: ");

View file

@ -162,7 +162,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
} = &docstring.node
{
if checker.settings.enabled.contains(&CheckCode::D201) {
let (before, _, _) = checker.locator.partition_source_code_at(
let (before, ..) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@ -248,7 +248,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
if checker.settings.enabled.contains(&CheckCode::D203)
|| checker.settings.enabled.contains(&CheckCode::D211)
{
let (before, _, _) = checker.locator.partition_source_code_at(
let (before, ..) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@ -406,7 +406,8 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
let line_indent = helpers::leading_space(lines[i]);
// We only report tab indentation once, so only check if we haven't seen a tab yet.
// We only report tab indentation once, so only check if we haven't seen a tab
// yet.
has_seen_tab = has_seen_tab || line_indent.contains('\t');
if checker.settings.enabled.contains(&CheckCode::D207) {
@ -432,9 +433,10 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
}
// Like pydocstyle, we only report over-indentation if either: (1) every line
// (except, optionally, the last line) is over-indented, or (2) the last line (which
// contains the closing quotation marks) is over-indented. We can't know if we've
// achieved that condition until we've viewed all the lines, so for now, just track
// (except, optionally, the last line) is over-indented, or (2) the last line
// (which contains the closing quotation marks) is
// over-indented. We can't know if we've achieved that condition
// until we've viewed all the lines, so for now, just track
// the over-indentation status of every line.
if i < lines.len() - 1 {
if line_indent.len() > docstring_indent.len() {
@ -855,7 +857,8 @@ pub fn not_empty(checker: &mut Checker, definition: &Definition) -> bool {
true
}
/// D212, D214, D215, D405, D406, D407, D408, D409, D410, D411, D412, D413, D414, D416, D417
/// D212, D214, D215, D405, D406, D407, D408, D409, D410, D411, D412, D413,
/// D414, D416, D417
pub fn sections(checker: &mut Checker, definition: &Definition) {
if let Some(docstring) = definition.docstring {
if let ExprKind::Constant {
@ -1304,11 +1307,9 @@ fn missing_args(checker: &mut Checker, definition: &Definition, docstrings_args:
.chain(arguments.kwonlyargs.iter())
.skip(
// If this is a non-static method, skip `cls` or `self`.
if matches!(definition.kind, DefinitionKind::Method(_)) && !is_static(parent) {
1
} else {
0
},
usize::from(
matches!(definition.kind, DefinitionKind::Method(_)) && !is_static(parent),
),
)
.collect();
if let Some(arg) = &arguments.vararg {
@ -1403,7 +1404,8 @@ fn parameters_section(checker: &mut Checker, definition: &Definition, context: &
// Otherwise, it's just a list of parameters on the current line.
current_line.trim()
};
// Notably, NumPy lets you put multiple parameters of the same type on the same line.
// Notably, NumPy lets you put multiple parameters of the same type on the same
// line.
for parameter in parameters.split(',') {
docstring_args.insert(parameter.trim());
}

View file

@ -153,8 +153,8 @@ pub const BUILTINS: &[&str] = &[
"zip",
];
// Globally defined names which are not attributes of the builtins module, or are only present on
// some platforms.
// Globally defined names which are not attributes of the builtins module, or
// are only present on some platforms.
pub const MAGIC_GLOBALS: &[&str] = &[
"WindowsError",
"__annotations__",

View file

@ -186,7 +186,8 @@ pub fn match_annotated_subscript(
None
}
/// Returns `true` if `Expr` represents a reference to a typing object with a PEP 585 built-in.
/// Returns `true` if `Expr` represents a reference to a typing object with a
/// PEP 585 built-in.
pub fn is_pep585_builtin(expr: &Expr, typing_imports: Option<&BTreeSet<&str>>) -> bool {
match &expr.node {
ExprKind::Attribute { attr, value, .. } => {

View file

@ -24,8 +24,9 @@ pub fn super_args(
let mut parents = parents.iter().rev();
// For a `super` invocation to be unnecessary, the first argument needs to match the enclosing
// class, and the second argument needs to match the first argument to the enclosing function.
// For a `super` invocation to be unnecessary, the first argument needs to match
// the enclosing class, and the second argument needs to match the first
// argument to the enclosing function.
if let [first_arg, second_arg] = args {
// Find the enclosing function definition (if any).
if let Some(StmtKind::FunctionDef {

View file

@ -79,7 +79,8 @@ pub fn remove_class_def_base(
_ => None,
}
} else {
// Case 3: `object` is the last node, so we have to find the last token that isn't a comma.
// Case 3: `object` is the last node, so we have to find the last token that
// isn't a comma.
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
for (start, tok, end) in lexer::make_tokenizer(content).flatten() {

View file

@ -1,5 +1,6 @@
//! User-provided program settings, taking into account pyproject.toml and command-line options.
//! Structure mirrors the user-facing representation of the various parameters.
//! User-provided program settings, taking into account pyproject.toml and
//! command-line options. Structure mirrors the user-facing representation of
//! the various parameters.
use std::collections::BTreeMap;
use std::path::PathBuf;

View file

@ -1,5 +1,6 @@
//! Effective program settings, taking into account pyproject.toml and command-line options.
//! Structure is optimized for internal usage, as opposed to external visibility or parsing.
//! Effective program settings, taking into account pyproject.toml and
//! command-line options. Structure is optimized for internal usage, as opposed
//! to external visibility or parsing.
use std::collections::{BTreeMap, BTreeSet};
use std::hash::{Hash, Hasher};
@ -94,7 +95,8 @@ impl Hash for Settings {
}
}
/// Given a set of selected and ignored prefixes, resolve the set of enabled error codes.
/// Given a set of selected and ignored prefixes, resolve the set of enabled
/// error codes.
fn resolve_codes(
select: &[CheckCodePrefix],
extend_select: &[CheckCodePrefix],

View file

@ -25,7 +25,8 @@ pub fn compute_offsets(contents: &str) -> Vec<Vec<usize>> {
char_index = i + char.len_utf8();
}
}
// If we end in a newline, add an extra character to indicate the start of that line.
// If we end in a newline, add an extra character to indicate the start of that
// line.
if newline {
offsets[line_index].push(char_index);
}

View file

@ -1,4 +1,5 @@
//! Abstractions for tracking public and private visibility across modules, classes, and functions.
//! Abstractions for tracking public and private visibility across modules,
//! classes, and functions.
use std::path::Path;
@ -133,8 +134,8 @@ fn class_visibility(stmt: &Stmt) -> Visibility {
/// Transition a `VisibleScope` based on a new `Documentable` definition.
///
/// `scope` is the current `VisibleScope`, while `Documentable` and `Stmt` describe the current
/// node used to modify visibility.
/// `scope` is the current `VisibleScope`, while `Documentable` and `Stmt`
/// describe the current node used to modify visibility.
pub fn transition_scope(scope: &VisibleScope, stmt: &Stmt, kind: &Documentable) -> VisibleScope {
match kind {
Documentable::Function => VisibleScope {