Mirror of https://github.com/astral-sh/ruff.git
refactor: Rename RuleOrigin to Linter
"origin" was accurate since ruff rules are currently always modeled after one origin (except the Ruff-specific rules). Since we however want to introduce a many-to-many mapping between codes and rules, the term "origin" no longer makes much sense. Rules usually don't have multiple origins but one linter implements a rule first and then others implement it later (often inspired from another linter). But we don't actually care much about where a rule originates from when mapping multiple rule codes to one rule implementation, so renaming RuleOrigin to Linter is less confusing with the many-to-many system.
Parent: babe1eb7be
Commit: 7fc42f8f85
12 changed files with 95 additions and 95 deletions
.github/workflows/ci.yaml (vendored): 2 changes
@@ -94,7 +94,7 @@ jobs:
           profile: minimal
           override: true
       - uses: Swatinem/rust-cache@v1
-      - run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --origin pylint
+      - run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --linter pylint
       - run: cargo check
       - run: ./scripts/add_plugin.py test --url https://pypi.org/project/-test/0.1.0/
       - run: cargo check
@@ -69,7 +69,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
 6. Update the generated files (documentation and generated code).
 
 To define the violation, start by creating a dedicated file for your rule under the appropriate
-rule origin (e.g., `src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
+rule linter (e.g., `src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
 contain a struct defined via `define_violation!`, along with a function that creates the violation
 based on any required inputs. (Many of the existing examples live in `src/violations.rs`, but we're
 looking to place new rules in their own files.)
@@ -81,7 +81,7 @@ collecting diagnostics as it goes.
 If you need to inspect the AST, you can run `cargo +nightly dev print-ast` with a Python file. Grep
 for the `Check::new` invocations to understand how other, similar rules are implemented.
 
-To add a test fixture, create a file under `resources/test/fixtures/[origin]`, named to match
+To add a test fixture, create a file under `resources/test/fixtures/[linter]`, named to match
 the code you defined earlier (e.g., `resources/test/fixtures/pycodestyle/E402.py`). This file should
 contain a variety of violations and non-violations designed to evaluate and demonstrate the behavior
 of your lint rule.
@@ -90,7 +90,7 @@ Run `cargo +nightly dev generate-all` to generate the code for your new fixture.
 locally with (e.g.) `cargo run resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
 
 Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
-`test_case` macro in the relevant `src/[origin]/mod.rs` file. Then, run `cargo test --all`.
+`test_case` macro in the relevant `src/[linter]/mod.rs` file. Then, run `cargo test --all`.
 Your test will fail, but you'll be prompted to follow-up with `cargo insta review`. Accept the
 generated snapshot, then commit the snapshot file alongside the rest of your changes.
 
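To make the boilerplate described above concrete, a new rule file ends up looking roughly like the following skeleton (the struct name is a placeholder and the trait body is elided; the authoritative template is the one scripts/add_rule.py writes further down in this diff):

    // Hypothetical skeleton of a per-rule file, e.g. src/rules/<linter>/rules/<rule>.rs
    define_violation!(
        pub struct DoTheThing;
    );

    impl Violation for DoTheThing {
        // Message (and optional autofix) items go here; see the add_rule.py
        // template in this commit for the exact shape.
    }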
build.rs: 18 changes
@@ -4,7 +4,7 @@ use std::path::{Path, PathBuf};
 
 fn main() {
     let out_dir = PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
-    generate_origin_name_and_url(&out_dir);
+    generate_linter_name_and_url(&out_dir);
 }
 
 const RULES_SUBMODULE_DOC_PREFIX: &str = "//! Rules from ";
@@ -15,13 +15,13 @@ const RULES_SUBMODULE_DOC_PREFIX: &str = "//! Rules from ";
 /// //! Rules from [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/).
 ///
 /// This function extracts the link label and url from these comments and
-/// generates the `name` and `url` functions for the `RuleOrigin` enum
+/// generates the `name` and `url` functions for the `Linter` enum
 /// accordingly, so that they can be used by `ruff_dev::generate_rules_table`.
-fn generate_origin_name_and_url(out_dir: &Path) {
+fn generate_linter_name_and_url(out_dir: &Path) {
     println!("cargo:rerun-if-changed=src/rules/");
 
-    let mut name_match_arms: String = r#"RuleOrigin::Ruff => "Ruff-specific rules","#.into();
-    let mut url_match_arms: String = r#"RuleOrigin::Ruff => None,"#.into();
+    let mut name_match_arms: String = r#"Linter::Ruff => "Ruff-specific rules","#.into();
+    let mut url_match_arms: String = r#"Linter::Ruff => None,"#.into();
 
     for file in fs::read_dir("src/rules/")
         .unwrap()
@@ -62,14 +62,14 @@ fn generate_origin_name_and_url(out_dir: &Path) {
             })
             .collect::<String>();
 
-        name_match_arms.push_str(&format!(r#"RuleOrigin::{variant_name} => "{name}","#));
-        url_match_arms.push_str(&format!(r#"RuleOrigin::{variant_name} => Some("{url}"),"#));
+        name_match_arms.push_str(&format!(r#"Linter::{variant_name} => "{name}","#));
+        url_match_arms.push_str(&format!(r#"Linter::{variant_name} => Some("{url}"),"#));
     }
 
     write!(
-        BufWriter::new(fs::File::create(out_dir.join("origin.rs")).unwrap()),
+        BufWriter::new(fs::File::create(out_dir.join("linter.rs")).unwrap()),
         "
-impl RuleOrigin {{
+impl Linter {{
     pub fn name(&self) -> &'static str {{
         match self {{ {name_match_arms} }}
     }}
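For orientation, the file this build script writes into OUT_DIR (origin.rs before, linter.rs after this change) and that the registry later pulls in via include! looks roughly like the sketch below; the Pyflakes arm is derived from the doc-comment example above, and the url accessor is only sketched from url_match_arms since its exact signature is not shown in this excerpt:

    // Sketch of the generated OUT_DIR/linter.rs (illustrative, not verbatim output).
    impl Linter {
        pub fn name(&self) -> &'static str {
            match self {
                Linter::Ruff => "Ruff-specific rules",
                Linter::Pyflakes => "Pyflakes",
                // ...one arm per submodule under src/rules/
            }
        }
        // A url accessor is generated the same way from url_match_arms, e.g.
        //     Linter::Ruff => None,
        //     Linter::Pyflakes => Some("https://pypi.org/project/pyflakes/2.5.0/"),
    }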
@@ -285,7 +285,7 @@ pub fn show_files(
 #[derive(Serialize)]
 struct Explanation<'a> {
     code: &'a str,
-    origin: &'a str,
+    linter: &'a str,
     summary: &'a str,
 }
 
@@ -315,7 +315,7 @@ pub fn explain(rule: &Rule, format: SerializationFormat) -> Result<()> {
         "{}",
         serde_json::to_string_pretty(&Explanation {
             code: rule.code(),
-            origin: rule.origin().name(),
+            linter: rule.origin().name(),
             summary: rule.message_formats()[0],
         })?
     );
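Because Explanation derives Serialize, this field rename is user-visible: the JSON produced by explain now carries a "linter" key where it previously carried "origin", i.e. roughly {"code": "...", "linter": "...", "summary": "..."} (values elided here).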
@@ -13,7 +13,7 @@ use assert_cmd::Command;
 use itertools::Itertools;
 use log::info;
 use ruff::logging::{set_up_logging, LogLevel};
-use ruff::registry::RuleOrigin;
+use ruff::registry::Linter;
 use strum::IntoEnumIterator;
 use walkdir::WalkDir;
 
@@ -175,12 +175,12 @@ fn test_ruff_black_compatibility() -> Result<()> {
         .filter_map(Result::ok)
         .collect();
 
-    let codes = RuleOrigin::iter()
+    let codes = Linter::iter()
         // Exclude ruff codes, specifically RUF100, because it causes differences that are not a
         // problem. Ruff would add a `# noqa: W292` after the first run, black introduces a
         // newline, and ruff removes the `# noqa: W292` again.
-        .filter(|origin| *origin != RuleOrigin::Ruff)
-        .map(|origin| origin.prefixes().as_list(","))
+        .filter(|linter| *linter != Linter::Ruff)
+        .map(|linter| linter.prefixes().as_list(","))
         .join(",");
     let ruff_args = [
         "-",
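The rename here is behavior-preserving: Linter::iter() still walks every linter, and prefixes().as_list(",") still expands each one to its code prefixes, so the resulting codes string (presumably used for ruff's rule selection in this test) comes out roughly as "ERA,YTT,ANN,...,E,W,...", with Ruff's own RUF prefix filtered out for the reason given in the comment. The exact prefixes follow the registry match further down in this diff.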
@@ -2,7 +2,7 @@
 
 use anyhow::Result;
 use clap::Args;
-use ruff::registry::{Prefixes, RuleCodePrefix, RuleOrigin};
+use ruff::registry::{Linter, Prefixes, RuleCodePrefix};
 use strum::IntoEnumIterator;
 
 use crate::utils::replace_readme_section;
@@ -47,22 +47,22 @@ pub fn main(cli: &Cli) -> Result<()> {
     // Generate the table string.
     let mut table_out = String::new();
     let mut toc_out = String::new();
-    for origin in RuleOrigin::iter() {
-        let prefixes = origin.prefixes();
+    for linter in Linter::iter() {
+        let prefixes = linter.prefixes();
         let codes_csv: String = prefixes.as_list(", ");
-        table_out.push_str(&format!("### {} ({codes_csv})", origin.name()));
+        table_out.push_str(&format!("### {} ({codes_csv})", linter.name()));
         table_out.push('\n');
         table_out.push('\n');
 
         toc_out.push_str(&format!(
             " 1. [{} ({})](#{}-{})\n",
-            origin.name(),
+            linter.name(),
             codes_csv,
-            origin.name().to_lowercase().replace(' ', "-"),
+            linter.name().to_lowercase().replace(' ', "-"),
             codes_csv.to_lowercase().replace(',', "-").replace(' ', "")
         ));
 
-        if let Some(url) = origin.url() {
+        if let Some(url) = linter.url() {
             let host = url
                 .trim_start_matches("https://")
                 .split('/')
@@ -70,7 +70,7 @@ pub fn main(cli: &Cli) -> Result<()> {
                 .unwrap();
             table_out.push_str(&format!(
                 "For more, see [{}]({}) on {}.",
-                origin.name(),
+                linter.name(),
                 url,
                 match host {
                     "pypi.org" => "PyPI",
@@ -78,7 +78,7 @@ pub fn main(cli: &Cli) -> Result<()> {
                     host => panic!(
                         "unexpected host in URL of {}, expected pypi.org or github.com but found \
                          {host}",
-                        origin.name()
+                        linter.name()
                     ),
                 }
             ));
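Concretely, for each linter the loop above emits a README section heading plus a table-of-contents entry. Assuming Pyflakes' prefix renders as F and its URL is the one shown in the build.rs doc comment, the generated fragments look roughly like:

    ### Pyflakes (F)
    For more, see [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/) on PyPI.
     1. [Pyflakes (F)](#pyflakes-f)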
@@ -30,7 +30,7 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
             .extend(quote! {Self::#name => <#path as Violation>::message_formats(),});
         rule_autofixable_match_arms.extend(quote! {Self::#name => <#path as Violation>::AUTOFIX,});
         let origin = get_origin(code);
-        rule_origin_match_arms.extend(quote! {Self::#name => RuleOrigin::#origin,});
+        rule_origin_match_arms.extend(quote! {Self::#name => Linter::#origin,});
         rule_code_match_arms.extend(quote! {Self::#name => #code_str,});
         rule_from_code_match_arms.extend(quote! {#code_str => Ok(&Rule::#name), });
         diagkind_code_match_arms.extend(quote! {Self::#name(..) => &Rule::#name, });
@@ -95,7 +95,7 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
             match self { #rule_autofixable_match_arms }
         }
 
-        pub fn origin(&self) -> RuleOrigin {
+        pub fn origin(&self) -> Linter {
             match self { #rule_origin_match_arms }
         }
 
@@ -142,16 +142,16 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
 
 fn get_origin(ident: &Ident) -> Ident {
     let ident = ident.to_string();
-    let mut iter = crate::prefixes::PREFIX_TO_ORIGIN.iter();
-    let origin = loop {
-        let (prefix, origin) = iter
+    let mut iter = crate::prefixes::PREFIX_TO_LINTER.iter();
+    let linter = loop {
+        let (prefix, linter) = iter
             .next()
             .unwrap_or_else(|| panic!("code doesn't start with any recognized prefix: {ident}"));
         if ident.starts_with(prefix) {
-            break origin;
+            break linter;
         }
     };
-    Ident::new(origin, Span::call_site())
+    Ident::new(linter, Span::call_site())
 }
 pub struct Mapping {
     entries: Vec<(Ident, Path, Ident)>,
@@ -1,7 +1,7 @@
-// Longer prefixes should come first so that you can find an origin for a code
+// Longer prefixes should come first so that you can find a linter for a code
 // by simply picking the first entry that starts with the given prefix.
 
-pub const PREFIX_TO_ORIGIN: &[(&str, &str)] = &[
+pub const PREFIX_TO_LINTER: &[(&str, &str)] = &[
     ("ANN", "Flake8Annotations"),
     ("ARG", "Flake8UnusedArguments"),
     ("A", "Flake8Builtins"),
@@ -41,12 +41,12 @@ pub const PREFIX_TO_ORIGIN: &[(&str, &str)] = &[
 
 #[cfg(test)]
 mod tests {
-    use super::PREFIX_TO_ORIGIN;
+    use super::PREFIX_TO_LINTER;
 
     #[test]
     fn order() {
-        for (idx, (prefix, _)) in PREFIX_TO_ORIGIN.iter().enumerate() {
-            for (prior_prefix, _) in PREFIX_TO_ORIGIN[..idx].iter() {
+        for (idx, (prefix, _)) in PREFIX_TO_LINTER.iter().enumerate() {
+            for (prior_prefix, _) in PREFIX_TO_LINTER[..idx].iter() {
                 assert!(!prefix.starts_with(prior_prefix));
             }
         }
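The ordering comment matters because lookup is first-match: "ANN" has to precede "A", otherwise a code such as ANN001 would be attributed to Flake8Builtins instead of Flake8Annotations, and the order() test above enforces exactly that invariant. A minimal sketch of the lookup (the macro's get_origin helper shown earlier does the same thing with a loop):

    // Minimal first-match lookup over PREFIX_TO_LINTER (illustrative helper,
    // not part of this commit).
    fn linter_for_code(code: &str) -> Option<&'static str> {
        PREFIX_TO_LINTER
            .iter()
            .find(|(prefix, _)| code.starts_with(*prefix))
            .map(|(_, linter)| *linter)
    }

    // linter_for_code("ANN001") == Some("Flake8Annotations")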
@@ -5,13 +5,13 @@ from pathlib import Path
 ROOT_DIR = Path(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 
-def dir_name(origin: str) -> str:
-    return origin.replace("-", "_")
+def dir_name(linter_name: str) -> str:
+    return linter_name.replace("-", "_")
 
 
-def pascal_case(origin: str) -> str:
+def pascal_case(linter_name: str) -> str:
     """Convert from snake-case to PascalCase."""
-    return "".join(word.title() for word in origin.split("-"))
+    return "".join(word.title() for word in linter_name.split("-"))
 
 
 def get_indent(line: str) -> str:
@@ -78,10 +78,10 @@ mod tests {
             fp.write(f"{indent}{pascal_case(plugin)},")
             fp.write("\n")
 
-        elif line.strip() == "RuleOrigin::Ruff => Prefixes::Single(RuleCodePrefix::RUF),":
+        elif line.strip() == "Linter::Ruff => Prefixes::Single(RuleCodePrefix::RUF),":
             prefix = 'todo!("Fill-in prefix after generating codes")'
             fp.write(
-                f"{indent}RuleOrigin::{pascal_case(plugin)} => Prefixes::Single({prefix}),"
+                f"{indent}Linter::{pascal_case(plugin)} => Prefixes::Single({prefix}),"
             )
             fp.write("\n")
 
@@ -6,7 +6,7 @@ Example usage:
     python scripts/add_rule.py \
         --name PreferListBuiltin \
         --code PIE807 \
-        --origin flake8-pie
+        --linter flake8-pie
 """
 
 import argparse
@@ -19,16 +19,16 @@ def snake_case(name: str) -> str:
     return "".join(f"_{word.lower()}" if word.isupper() else word for word in name).lstrip("_")
 
 
-def main(*, name: str, code: str, origin: str) -> None:
+def main(*, name: str, code: str, linter: str) -> None:
     # Create a test fixture.
     with open(
-        ROOT_DIR / "resources/test/fixtures" / dir_name(origin) / f"{code}.py",
+        ROOT_DIR / "resources/test/fixtures" / dir_name(linter) / f"{code}.py",
         "a",
     ):
         pass
 
     # Add the relevant `#testcase` macro.
-    mod_rs = ROOT_DIR / "src/rules" / dir_name(origin) / "mod.rs"
+    mod_rs = ROOT_DIR / "src/rules" / dir_name(linter) / "mod.rs"
     content = mod_rs.read_text()
 
     with open(mod_rs, "w") as fp:
@@ -42,7 +42,7 @@ def main(*, name: str, code: str, origin: str) -> None:
             fp.write("\n")
 
     # Add the relevant rule function.
-    with open(ROOT_DIR / "src/rules" / dir_name(origin) / (snake_case(name) + ".rs"), "w") as fp:
+    with open(ROOT_DIR / "src/rules" / dir_name(linter) / (snake_case(name) + ".rs"), "w") as fp:
         fp.write(
             f"""
 /// {code}
@@ -59,7 +59,7 @@ pub fn {snake_case(name)}(checker: &mut Checker) {{}}
             fp.write(line)
             fp.write("\n")
 
-            if line.startswith(f"// {origin}"):
+            if line.startswith(f"// {linter}"):
                 fp.write(
                     """define_violation!(
     pub struct %s;
@@ -96,7 +96,7 @@ impl Violation for %s {
             if not seen_macro:
                 continue
 
-            if line.strip() == f"// {origin}":
+            if line.strip() == f"// {linter}":
                 indent = get_indent(line)
                 fp.write(f"{indent}{code} => violations::{name},")
                 fp.write("\n")
@@ -108,7 +108,7 @@ impl Violation for %s {
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Generate boilerplate for a new rule.",
-        epilog="python scripts/add_rule.py --name PreferListBuiltin --code PIE807 --origin flake8-pie",
+        epilog="python scripts/add_rule.py --name PreferListBuiltin --code PIE807 --linter flake8-pie",
     )
     parser.add_argument(
         "--name",
@@ -123,11 +123,11 @@ if __name__ == "__main__":
         help="The code of the check to generate (e.g., 'A001').",
     )
     parser.add_argument(
-        "--origin",
+        "--linter",
        type=str,
        required=True,
        help="The source with which the check originated (e.g., 'flake8-builtins').",
    )
    args = parser.parse_args()
 
-    main(name=args.name, code=args.code, origin=args.origin)
+    main(name=args.name, code=args.code, linter=args.linter)
@@ -433,7 +433,7 @@ ruff_macros::define_rule_mapping!(
 );
 
 #[derive(EnumIter, Debug, PartialEq, Eq)]
-pub enum RuleOrigin {
+pub enum Linter {
     Pyflakes,
     Pycodestyle,
     McCabe,
@@ -489,55 +489,55 @@ impl Prefixes {
         }
     }
 }
 
-include!(concat!(env!("OUT_DIR"), "/origin.rs"));
+include!(concat!(env!("OUT_DIR"), "/linter.rs"));
 
-impl RuleOrigin {
+impl Linter {
     pub fn prefixes(&self) -> Prefixes {
         match self {
-            RuleOrigin::Eradicate => Prefixes::Single(RuleCodePrefix::ERA),
-            RuleOrigin::Flake82020 => Prefixes::Single(RuleCodePrefix::YTT),
-            RuleOrigin::Flake8Annotations => Prefixes::Single(RuleCodePrefix::ANN),
-            RuleOrigin::Flake8Bandit => Prefixes::Single(RuleCodePrefix::S),
-            RuleOrigin::Flake8BlindExcept => Prefixes::Single(RuleCodePrefix::BLE),
-            RuleOrigin::Flake8BooleanTrap => Prefixes::Single(RuleCodePrefix::FBT),
-            RuleOrigin::Flake8Bugbear => Prefixes::Single(RuleCodePrefix::B),
-            RuleOrigin::Flake8Builtins => Prefixes::Single(RuleCodePrefix::A),
-            RuleOrigin::Flake8Comprehensions => Prefixes::Single(RuleCodePrefix::C4),
-            RuleOrigin::Flake8Datetimez => Prefixes::Single(RuleCodePrefix::DTZ),
-            RuleOrigin::Flake8Debugger => Prefixes::Single(RuleCodePrefix::T10),
-            RuleOrigin::Flake8ErrMsg => Prefixes::Single(RuleCodePrefix::EM),
-            RuleOrigin::Flake8ImplicitStrConcat => Prefixes::Single(RuleCodePrefix::ISC),
-            RuleOrigin::Flake8ImportConventions => Prefixes::Single(RuleCodePrefix::ICN),
-            RuleOrigin::Flake8Print => Prefixes::Single(RuleCodePrefix::T20),
-            RuleOrigin::Flake8PytestStyle => Prefixes::Single(RuleCodePrefix::PT),
-            RuleOrigin::Flake8Quotes => Prefixes::Single(RuleCodePrefix::Q),
-            RuleOrigin::Flake8Return => Prefixes::Single(RuleCodePrefix::RET),
-            RuleOrigin::Flake8Simplify => Prefixes::Single(RuleCodePrefix::SIM),
-            RuleOrigin::Flake8TidyImports => Prefixes::Single(RuleCodePrefix::TID),
-            RuleOrigin::Flake8UnusedArguments => Prefixes::Single(RuleCodePrefix::ARG),
-            RuleOrigin::Isort => Prefixes::Single(RuleCodePrefix::I),
-            RuleOrigin::McCabe => Prefixes::Single(RuleCodePrefix::C90),
-            RuleOrigin::PEP8Naming => Prefixes::Single(RuleCodePrefix::N),
-            RuleOrigin::PandasVet => Prefixes::Single(RuleCodePrefix::PD),
-            RuleOrigin::Pycodestyle => Prefixes::Multiple(vec![
+            Linter::Eradicate => Prefixes::Single(RuleCodePrefix::ERA),
+            Linter::Flake82020 => Prefixes::Single(RuleCodePrefix::YTT),
+            Linter::Flake8Annotations => Prefixes::Single(RuleCodePrefix::ANN),
+            Linter::Flake8Bandit => Prefixes::Single(RuleCodePrefix::S),
+            Linter::Flake8BlindExcept => Prefixes::Single(RuleCodePrefix::BLE),
+            Linter::Flake8BooleanTrap => Prefixes::Single(RuleCodePrefix::FBT),
+            Linter::Flake8Bugbear => Prefixes::Single(RuleCodePrefix::B),
+            Linter::Flake8Builtins => Prefixes::Single(RuleCodePrefix::A),
+            Linter::Flake8Comprehensions => Prefixes::Single(RuleCodePrefix::C4),
+            Linter::Flake8Datetimez => Prefixes::Single(RuleCodePrefix::DTZ),
+            Linter::Flake8Debugger => Prefixes::Single(RuleCodePrefix::T10),
+            Linter::Flake8ErrMsg => Prefixes::Single(RuleCodePrefix::EM),
+            Linter::Flake8ImplicitStrConcat => Prefixes::Single(RuleCodePrefix::ISC),
+            Linter::Flake8ImportConventions => Prefixes::Single(RuleCodePrefix::ICN),
+            Linter::Flake8Print => Prefixes::Single(RuleCodePrefix::T20),
+            Linter::Flake8PytestStyle => Prefixes::Single(RuleCodePrefix::PT),
+            Linter::Flake8Quotes => Prefixes::Single(RuleCodePrefix::Q),
+            Linter::Flake8Return => Prefixes::Single(RuleCodePrefix::RET),
+            Linter::Flake8Simplify => Prefixes::Single(RuleCodePrefix::SIM),
+            Linter::Flake8TidyImports => Prefixes::Single(RuleCodePrefix::TID),
+            Linter::Flake8UnusedArguments => Prefixes::Single(RuleCodePrefix::ARG),
+            Linter::Isort => Prefixes::Single(RuleCodePrefix::I),
+            Linter::McCabe => Prefixes::Single(RuleCodePrefix::C90),
+            Linter::PEP8Naming => Prefixes::Single(RuleCodePrefix::N),
+            Linter::PandasVet => Prefixes::Single(RuleCodePrefix::PD),
+            Linter::Pycodestyle => Prefixes::Multiple(vec![
                 (RuleCodePrefix::E, "Error"),
                 (RuleCodePrefix::W, "Warning"),
             ]),
-            RuleOrigin::Pydocstyle => Prefixes::Single(RuleCodePrefix::D),
-            RuleOrigin::Pyflakes => Prefixes::Single(RuleCodePrefix::F),
-            RuleOrigin::PygrepHooks => Prefixes::Single(RuleCodePrefix::PGH),
-            RuleOrigin::Pylint => Prefixes::Multiple(vec![
+            Linter::Pydocstyle => Prefixes::Single(RuleCodePrefix::D),
+            Linter::Pyflakes => Prefixes::Single(RuleCodePrefix::F),
+            Linter::PygrepHooks => Prefixes::Single(RuleCodePrefix::PGH),
+            Linter::Pylint => Prefixes::Multiple(vec![
                 (RuleCodePrefix::PLC, "Convention"),
                 (RuleCodePrefix::PLE, "Error"),
                 (RuleCodePrefix::PLR, "Refactor"),
                 (RuleCodePrefix::PLW, "Warning"),
             ]),
-            RuleOrigin::Pyupgrade => Prefixes::Single(RuleCodePrefix::UP),
-            RuleOrigin::Flake8Pie => Prefixes::Single(RuleCodePrefix::PIE),
-            RuleOrigin::Flake8Commas => Prefixes::Single(RuleCodePrefix::COM),
-            RuleOrigin::Flake8NoPep420 => Prefixes::Single(RuleCodePrefix::INP),
-            RuleOrigin::Flake8Executable => Prefixes::Single(RuleCodePrefix::EXE),
-            RuleOrigin::Ruff => Prefixes::Single(RuleCodePrefix::RUF),
+            Linter::Pyupgrade => Prefixes::Single(RuleCodePrefix::UP),
+            Linter::Flake8Pie => Prefixes::Single(RuleCodePrefix::PIE),
+            Linter::Flake8Commas => Prefixes::Single(RuleCodePrefix::COM),
+            Linter::Flake8NoPep420 => Prefixes::Single(RuleCodePrefix::INP),
+            Linter::Flake8Executable => Prefixes::Single(RuleCodePrefix::EXE),
+            Linter::Ruff => Prefixes::Single(RuleCodePrefix::RUF),
         }
     }
 }
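The Prefixes type itself is untouched by this commit; from its usage above it has roughly the following shape (inferred for illustration, not the actual definition):

    // Inferred sketch of the Prefixes type matched on above (not part of this diff).
    pub enum Prefixes {
        Single(RuleCodePrefix),
        Multiple(Vec<(RuleCodePrefix, &'static str)>),
    }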