refactor: Rename RuleOrigin to Linter

"origin" was accurate since ruff rules are currently always modeled
after one origin (except the Ruff-specific rules).

Since we however want to introduce a many-to-many mapping between codes
and rules, the term "origin" no longer makes much sense. Rules usually
don't have multiple origins but one linter implements a rule first and
then others implement it later (often inspired from another linter).
But we don't actually care much about where a rule originates from when
mapping multiple rule codes to one rule implementation, so renaming
RuleOrigin to Linter is less confusing with the many-to-many system.
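
As a purely illustrative sketch of that many-to-many direction (not part of this commit; the `Rule` enum, `CODE_TO_RULE` table, `rule_for_code` helper, and the PLW0611 alias are all hypothetical), several codes could eventually resolve to a single rule implementation:

// Hypothetical sketch only, not ruff's actual registry code.
#[derive(Debug, PartialEq)]
enum Rule {
    UnusedImport,
}

// Several codes, introduced by different linters, map to one rule.
const CODE_TO_RULE: &[(&str, Rule)] = &[
    ("F401", Rule::UnusedImport),    // Pyflakes' unused-import code
    ("PLW0611", Rule::UnusedImport), // hypothetical Pylint-style alias
];

fn rule_for_code(code: &str) -> Option<&'static Rule> {
    CODE_TO_RULE.iter().find(|(c, _)| *c == code).map(|(_, r)| r)
}

fn main() {
    assert_eq!(rule_for_code("F401"), rule_for_code("PLW0611"));
}
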
Authored by Martin Fischer on 2023-01-20 14:27:27 +01:00; committed by Charlie Marsh
parent babe1eb7be
commit 7fc42f8f85
12 changed files with 95 additions and 95 deletions


@@ -94,7 +94,7 @@ jobs:
profile: minimal
override: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --origin pylint
- run: ./scripts/add_rule.py --name DoTheThing --code PLC999 --linter pylint
- run: cargo check
- run: ./scripts/add_plugin.py test --url https://pypi.org/project/-test/0.1.0/
- run: cargo check


@@ -69,7 +69,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
6. Update the generated files (documentation and generated code).
To define the violation, start by creating a dedicated file for your rule under the appropriate
rule origin (e.g., `src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
rule linter (e.g., `src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
contain a struct defined via `define_violation!`, along with a function that creates the violation
based on any required inputs. (Many of the existing examples live in `src/violations.rs`, but we're
looking to place new rules in their own files.)
@@ -81,7 +81,7 @@ collecting diagnostics as it goes.
If you need to inspect the AST, you can run `cargo +nightly dev print-ast` with a Python file. Grep
for the `Check::new` invocations to understand how other, similar rules are implemented.
To add a test fixture, create a file under `resources/test/fixtures/[origin]`, named to match
To add a test fixture, create a file under `resources/test/fixtures/[linter]`, named to match
the code you defined earlier (e.g., `resources/test/fixtures/pycodestyle/E402.py`). This file should
contain a variety of violations and non-violations designed to evaluate and demonstrate the behavior
of your lint rule.
@@ -90,7 +90,7 @@ Run `cargo +nightly dev generate-all` to generate the code for your new fixture.
locally with (e.g.) `cargo run resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
`test_case` macro in the relevant `src/[origin]/mod.rs` file. Then, run `cargo test --all`.
`test_case` macro in the relevant `src/[linter]/mod.rs` file. Then, run `cargo test --all`.
Your test will fail, but you'll be prompted to follow-up with `cargo insta review`. Accept the
generated snapshot, then commit the snapshot file alongside the rest of your changes.
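
The steps above reference ruff-internal items (`define_violation!`, `Violation`, `Checker`) whose definitions are not part of this diff. The sketch below only shows the rough shape of a new rule file; it substitutes minimal stand-ins for those items so it compiles on its own, and the stand-in trait method is invented for illustration:

// `define_violation!`, `Violation`, and `Checker` are stand-ins defined here
// so this sketch compiles in isolation; in ruff they come from the crate itself.
macro_rules! define_violation {
    (pub struct $name:ident;) => {
        pub struct $name;
    };
}

trait Violation {
    fn message(&self) -> String;
}

pub struct Checker; // stand-in for ruff's AST checker

define_violation!(
    pub struct DoTheThing;
);

impl Violation for DoTheThing {
    fn message(&self) -> String {
        "Do the thing".to_string()
    }
}

/// PLC999
pub fn do_the_thing(_checker: &mut Checker) {
    // Inspect the AST via the checker and emit a diagnostic on violations
    // (grep for `Check::new` invocations in ruff for real examples).
}

fn main() {}
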


@@ -4,7 +4,7 @@ use std::path::{Path, PathBuf};
fn main() {
let out_dir = PathBuf::from(std::env::var_os("OUT_DIR").unwrap());
generate_origin_name_and_url(&out_dir);
generate_linter_name_and_url(&out_dir);
}
const RULES_SUBMODULE_DOC_PREFIX: &str = "//! Rules from ";
@@ -15,13 +15,13 @@ const RULES_SUBMODULE_DOC_PREFIX: &str = "//! Rules from ";
/// //! Rules from [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/).
///
/// This function extracts the link label and url from these comments and
/// generates the `name` and `url` functions for the `RuleOrigin` enum
/// generates the `name` and `url` functions for the `Linter` enum
/// accordingly, so that they can be used by `ruff_dev::generate_rules_table`.
fn generate_origin_name_and_url(out_dir: &Path) {
fn generate_linter_name_and_url(out_dir: &Path) {
println!("cargo:rerun-if-changed=src/rules/");
let mut name_match_arms: String = r#"RuleOrigin::Ruff => "Ruff-specific rules","#.into();
let mut url_match_arms: String = r#"RuleOrigin::Ruff => None,"#.into();
let mut name_match_arms: String = r#"Linter::Ruff => "Ruff-specific rules","#.into();
let mut url_match_arms: String = r#"Linter::Ruff => None,"#.into();
for file in fs::read_dir("src/rules/")
.unwrap()
@@ -62,14 +62,14 @@ fn generate_origin_name_and_url(out_dir: &Path) {
})
.collect::<String>();
name_match_arms.push_str(&format!(r#"RuleOrigin::{variant_name} => "{name}","#));
url_match_arms.push_str(&format!(r#"RuleOrigin::{variant_name} => Some("{url}"),"#));
name_match_arms.push_str(&format!(r#"Linter::{variant_name} => "{name}","#));
url_match_arms.push_str(&format!(r#"Linter::{variant_name} => Some("{url}"),"#));
}
write!(
BufWriter::new(fs::File::create(out_dir.join("origin.rs")).unwrap()),
BufWriter::new(fs::File::create(out_dir.join("linter.rs")).unwrap()),
"
impl RuleOrigin {{
impl Linter {{
pub fn name(&self) -> &'static str {{
match self {{ {name_match_arms} }}
}}
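
The extraction itself is elided from this hunk. As a self-contained sketch of what parsing such a submodule doc line involves (assuming the `//! Rules from ` prefix shown above; `parse_doc_line` is a hypothetical helper, not the actual build.rs code):

// Hypothetical helper illustrating the label/URL extraction; the real build.rs
// logic is not shown in this hunk and may differ.
fn parse_doc_line(line: &str) -> Option<(&str, &str)> {
    let rest = line.strip_prefix("//! Rules from ")?;
    let (label, rest) = rest.strip_prefix('[')?.split_once("](")?;
    let (url, _) = rest.split_once(')')?;
    Some((label, url))
}

fn main() {
    let line = "//! Rules from [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/).";
    assert_eq!(
        parse_doc_line(line),
        Some(("Pyflakes", "https://pypi.org/project/pyflakes/2.5.0/"))
    );
}
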


@@ -285,7 +285,7 @@ pub fn show_files(
#[derive(Serialize)]
struct Explanation<'a> {
code: &'a str,
origin: &'a str,
linter: &'a str,
summary: &'a str,
}
@@ -315,7 +315,7 @@ pub fn explain(rule: &Rule, format: SerializationFormat) -> Result<()> {
"{}",
serde_json::to_string_pretty(&Explanation {
code: rule.code(),
origin: rule.origin().name(),
linter: rule.origin().name(),
summary: rule.message_formats()[0],
})?
);


@@ -13,7 +13,7 @@ use assert_cmd::Command;
use itertools::Itertools;
use log::info;
use ruff::logging::{set_up_logging, LogLevel};
use ruff::registry::RuleOrigin;
use ruff::registry::Linter;
use strum::IntoEnumIterator;
use walkdir::WalkDir;
@@ -175,12 +175,12 @@ fn test_ruff_black_compatibility() -> Result<()> {
.filter_map(Result::ok)
.collect();
let codes = RuleOrigin::iter()
let codes = Linter::iter()
// Exclude ruff codes, specifically RUF100, because it causes differences that are not a
// problem. Ruff would add a `# noqa: W292` after the first run, black introduces a
// newline, and ruff removes the `# noqa: W292` again.
.filter(|origin| *origin != RuleOrigin::Ruff)
.map(|origin| origin.prefixes().as_list(","))
.filter(|linter| *linter != Linter::Ruff)
.map(|linter| linter.prefixes().as_list(","))
.join(",");
let ruff_args = [
"-",


@@ -2,7 +2,7 @@
use anyhow::Result;
use clap::Args;
use ruff::registry::{Prefixes, RuleCodePrefix, RuleOrigin};
use ruff::registry::{Linter, Prefixes, RuleCodePrefix};
use strum::IntoEnumIterator;
use crate::utils::replace_readme_section;
@@ -47,22 +47,22 @@ pub fn main(cli: &Cli) -> Result<()> {
// Generate the table string.
let mut table_out = String::new();
let mut toc_out = String::new();
for origin in RuleOrigin::iter() {
let prefixes = origin.prefixes();
for linter in Linter::iter() {
let prefixes = linter.prefixes();
let codes_csv: String = prefixes.as_list(", ");
table_out.push_str(&format!("### {} ({codes_csv})", origin.name()));
table_out.push_str(&format!("### {} ({codes_csv})", linter.name()));
table_out.push('\n');
table_out.push('\n');
toc_out.push_str(&format!(
" 1. [{} ({})](#{}-{})\n",
origin.name(),
linter.name(),
codes_csv,
origin.name().to_lowercase().replace(' ', "-"),
linter.name().to_lowercase().replace(' ', "-"),
codes_csv.to_lowercase().replace(',', "-").replace(' ', "")
));
if let Some(url) = origin.url() {
if let Some(url) = linter.url() {
let host = url
.trim_start_matches("https://")
.split('/')
@@ -70,7 +70,7 @@ pub fn main(cli: &Cli) -> Result<()> {
.unwrap();
table_out.push_str(&format!(
"For more, see [{}]({}) on {}.",
origin.name(),
linter.name(),
url,
match host {
"pypi.org" => "PyPI",
@@ -78,7 +78,7 @@ pub fn main(cli: &Cli) -> Result<()> {
host => panic!(
"unexpected host in URL of {}, expected pypi.org or github.com but found \
{host}",
origin.name()
linter.name()
),
}
));


@@ -30,7 +30,7 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
.extend(quote! {Self::#name => <#path as Violation>::message_formats(),});
rule_autofixable_match_arms.extend(quote! {Self::#name => <#path as Violation>::AUTOFIX,});
let origin = get_origin(code);
rule_origin_match_arms.extend(quote! {Self::#name => RuleOrigin::#origin,});
rule_origin_match_arms.extend(quote! {Self::#name => Linter::#origin,});
rule_code_match_arms.extend(quote! {Self::#name => #code_str,});
rule_from_code_match_arms.extend(quote! {#code_str => Ok(&Rule::#name), });
diagkind_code_match_arms.extend(quote! {Self::#name(..) => &Rule::#name, });
@@ -95,7 +95,7 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
match self { #rule_autofixable_match_arms }
}
pub fn origin(&self) -> RuleOrigin {
pub fn origin(&self) -> Linter {
match self { #rule_origin_match_arms }
}
@@ -142,16 +142,16 @@ pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
fn get_origin(ident: &Ident) -> Ident {
let ident = ident.to_string();
let mut iter = crate::prefixes::PREFIX_TO_ORIGIN.iter();
let origin = loop {
let (prefix, origin) = iter
let mut iter = crate::prefixes::PREFIX_TO_LINTER.iter();
let linter = loop {
let (prefix, linter) = iter
.next()
.unwrap_or_else(|| panic!("code doesn't start with any recognized prefix: {ident}"));
if ident.starts_with(prefix) {
break origin;
break linter;
}
};
Ident::new(origin, Span::call_site())
Ident::new(linter, Span::call_site())
}
pub struct Mapping {
entries: Vec<(Ident, Path, Ident)>,


@@ -1,7 +1,7 @@
// Longer prefixes should come first so that you can find an origin for a code
// Longer prefixes should come first so that you can find a linter for a code
// by simply picking the first entry that starts with the given prefix.
pub const PREFIX_TO_ORIGIN: &[(&str, &str)] = &[
pub const PREFIX_TO_LINTER: &[(&str, &str)] = &[
("ANN", "Flake8Annotations"),
("ARG", "Flake8UnusedArguments"),
("A", "Flake8Builtins"),
@@ -41,12 +41,12 @@ pub const PREFIX_TO_ORIGIN: &[(&str, &str)] = &[
#[cfg(test)]
mod tests {
use super::PREFIX_TO_ORIGIN;
use super::PREFIX_TO_LINTER;
#[test]
fn order() {
for (idx, (prefix, _)) in PREFIX_TO_ORIGIN.iter().enumerate() {
for (prior_prefix, _) in PREFIX_TO_ORIGIN[..idx].iter() {
for (idx, (prefix, _)) in PREFIX_TO_LINTER.iter().enumerate() {
for (prior_prefix, _) in PREFIX_TO_LINTER[..idx].iter() {
assert!(!prefix.starts_with(prior_prefix));
}
}
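
As a self-contained illustration of why that ordering matters (the `first_match` helper is hypothetical, not code from this commit): if "A" came before "ARG", a code like ARG001 would be attributed to flake8-builtins instead of flake8-unused-arguments.

// Hypothetical demonstration of the longest-prefix-first invariant tested above.
fn first_match<'a>(table: &'a [(&'a str, &'a str)], code: &str) -> Option<&'a str> {
    table.iter().find(|(p, _)| code.starts_with(*p)).map(|(_, l)| *l)
}

fn main() {
    let good = [("ARG", "Flake8UnusedArguments"), ("A", "Flake8Builtins")];
    let bad = [("A", "Flake8Builtins"), ("ARG", "Flake8UnusedArguments")];
    assert_eq!(first_match(&good, "ARG001"), Some("Flake8UnusedArguments"));
    assert_eq!(first_match(&bad, "ARG001"), Some("Flake8Builtins")); // wrong linter!
}
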


@@ -5,13 +5,13 @@ from pathlib import Path
ROOT_DIR = Path(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def dir_name(origin: str) -> str:
return origin.replace("-", "_")
def dir_name(linter_name: str) -> str:
return linter_name.replace("-", "_")
def pascal_case(origin: str) -> str:
def pascal_case(linter_name: str) -> str:
"""Convert from snake-case to PascalCase."""
return "".join(word.title() for word in origin.split("-"))
return "".join(word.title() for word in linter_name.split("-"))
def get_indent(line: str) -> str:


@@ -78,10 +78,10 @@ mod tests {
fp.write(f"{indent}{pascal_case(plugin)},")
fp.write("\n")
elif line.strip() == "RuleOrigin::Ruff => Prefixes::Single(RuleCodePrefix::RUF),":
elif line.strip() == "Linter::Ruff => Prefixes::Single(RuleCodePrefix::RUF),":
prefix = 'todo!("Fill-in prefix after generating codes")'
fp.write(
f"{indent}RuleOrigin::{pascal_case(plugin)} => Prefixes::Single({prefix}),"
f"{indent}Linter::{pascal_case(plugin)} => Prefixes::Single({prefix}),"
)
fp.write("\n")


@@ -6,7 +6,7 @@ Example usage:
python scripts/add_rule.py \
--name PreferListBuiltin \
--code PIE807 \
--origin flake8-pie
--linter flake8-pie
"""
import argparse
@@ -19,16 +19,16 @@ def snake_case(name: str) -> str:
return "".join(f"_{word.lower()}" if word.isupper() else word for word in name).lstrip("_")
def main(*, name: str, code: str, origin: str) -> None:
def main(*, name: str, code: str, linter: str) -> None:
# Create a test fixture.
with open(
ROOT_DIR / "resources/test/fixtures" / dir_name(origin) / f"{code}.py",
ROOT_DIR / "resources/test/fixtures" / dir_name(linter) / f"{code}.py",
"a",
):
pass
# Add the relevant `#testcase` macro.
mod_rs = ROOT_DIR / "src/rules" / dir_name(origin) / "mod.rs"
mod_rs = ROOT_DIR / "src/rules" / dir_name(linter) / "mod.rs"
content = mod_rs.read_text()
with open(mod_rs, "w") as fp:
@@ -42,7 +42,7 @@ def main(*, name: str, code: str, origin: str) -> None:
fp.write("\n")
# Add the relevant rule function.
with open(ROOT_DIR / "src/rules" / dir_name(origin) / (snake_case(name) + ".rs"), "w") as fp:
with open(ROOT_DIR / "src/rules" / dir_name(linter) / (snake_case(name) + ".rs"), "w") as fp:
fp.write(
f"""
/// {code}
@@ -59,7 +59,7 @@ pub fn {snake_case(name)}(checker: &mut Checker) {{}}
fp.write(line)
fp.write("\n")
if line.startswith(f"// {origin}"):
if line.startswith(f"// {linter}"):
fp.write(
"""define_violation!(
pub struct %s;
@@ -96,7 +96,7 @@ impl Violation for %s {
if not seen_macro:
continue
if line.strip() == f"// {origin}":
if line.strip() == f"// {linter}":
indent = get_indent(line)
fp.write(f"{indent}{code} => violations::{name},")
fp.write("\n")
@@ -108,7 +108,7 @@ impl Violation for %s {
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Generate boilerplate for a new rule.",
epilog="python scripts/add_rule.py --name PreferListBuiltin --code PIE807 --origin flake8-pie",
epilog="python scripts/add_rule.py --name PreferListBuiltin --code PIE807 --linter flake8-pie",
)
parser.add_argument(
"--name",
@@ -123,11 +123,11 @@ if __name__ == "__main__":
help="The code of the check to generate (e.g., 'A001').",
)
parser.add_argument(
"--origin",
"--linter",
type=str,
required=True,
help="The source with which the check originated (e.g., 'flake8-builtins').",
)
args = parser.parse_args()
main(name=args.name, code=args.code, origin=args.origin)
main(name=args.name, code=args.code, linter=args.linter)


@@ -433,7 +433,7 @@ ruff_macros::define_rule_mapping!(
);
#[derive(EnumIter, Debug, PartialEq, Eq)]
pub enum RuleOrigin {
pub enum Linter {
Pyflakes,
Pycodestyle,
McCabe,
@@ -489,55 +489,55 @@ impl Prefixes {
}
}
include!(concat!(env!("OUT_DIR"), "/origin.rs"));
include!(concat!(env!("OUT_DIR"), "/linter.rs"));
impl RuleOrigin {
impl Linter {
pub fn prefixes(&self) -> Prefixes {
match self {
RuleOrigin::Eradicate => Prefixes::Single(RuleCodePrefix::ERA),
RuleOrigin::Flake82020 => Prefixes::Single(RuleCodePrefix::YTT),
RuleOrigin::Flake8Annotations => Prefixes::Single(RuleCodePrefix::ANN),
RuleOrigin::Flake8Bandit => Prefixes::Single(RuleCodePrefix::S),
RuleOrigin::Flake8BlindExcept => Prefixes::Single(RuleCodePrefix::BLE),
RuleOrigin::Flake8BooleanTrap => Prefixes::Single(RuleCodePrefix::FBT),
RuleOrigin::Flake8Bugbear => Prefixes::Single(RuleCodePrefix::B),
RuleOrigin::Flake8Builtins => Prefixes::Single(RuleCodePrefix::A),
RuleOrigin::Flake8Comprehensions => Prefixes::Single(RuleCodePrefix::C4),
RuleOrigin::Flake8Datetimez => Prefixes::Single(RuleCodePrefix::DTZ),
RuleOrigin::Flake8Debugger => Prefixes::Single(RuleCodePrefix::T10),
RuleOrigin::Flake8ErrMsg => Prefixes::Single(RuleCodePrefix::EM),
RuleOrigin::Flake8ImplicitStrConcat => Prefixes::Single(RuleCodePrefix::ISC),
RuleOrigin::Flake8ImportConventions => Prefixes::Single(RuleCodePrefix::ICN),
RuleOrigin::Flake8Print => Prefixes::Single(RuleCodePrefix::T20),
RuleOrigin::Flake8PytestStyle => Prefixes::Single(RuleCodePrefix::PT),
RuleOrigin::Flake8Quotes => Prefixes::Single(RuleCodePrefix::Q),
RuleOrigin::Flake8Return => Prefixes::Single(RuleCodePrefix::RET),
RuleOrigin::Flake8Simplify => Prefixes::Single(RuleCodePrefix::SIM),
RuleOrigin::Flake8TidyImports => Prefixes::Single(RuleCodePrefix::TID),
RuleOrigin::Flake8UnusedArguments => Prefixes::Single(RuleCodePrefix::ARG),
RuleOrigin::Isort => Prefixes::Single(RuleCodePrefix::I),
RuleOrigin::McCabe => Prefixes::Single(RuleCodePrefix::C90),
RuleOrigin::PEP8Naming => Prefixes::Single(RuleCodePrefix::N),
RuleOrigin::PandasVet => Prefixes::Single(RuleCodePrefix::PD),
RuleOrigin::Pycodestyle => Prefixes::Multiple(vec![
Linter::Eradicate => Prefixes::Single(RuleCodePrefix::ERA),
Linter::Flake82020 => Prefixes::Single(RuleCodePrefix::YTT),
Linter::Flake8Annotations => Prefixes::Single(RuleCodePrefix::ANN),
Linter::Flake8Bandit => Prefixes::Single(RuleCodePrefix::S),
Linter::Flake8BlindExcept => Prefixes::Single(RuleCodePrefix::BLE),
Linter::Flake8BooleanTrap => Prefixes::Single(RuleCodePrefix::FBT),
Linter::Flake8Bugbear => Prefixes::Single(RuleCodePrefix::B),
Linter::Flake8Builtins => Prefixes::Single(RuleCodePrefix::A),
Linter::Flake8Comprehensions => Prefixes::Single(RuleCodePrefix::C4),
Linter::Flake8Datetimez => Prefixes::Single(RuleCodePrefix::DTZ),
Linter::Flake8Debugger => Prefixes::Single(RuleCodePrefix::T10),
Linter::Flake8ErrMsg => Prefixes::Single(RuleCodePrefix::EM),
Linter::Flake8ImplicitStrConcat => Prefixes::Single(RuleCodePrefix::ISC),
Linter::Flake8ImportConventions => Prefixes::Single(RuleCodePrefix::ICN),
Linter::Flake8Print => Prefixes::Single(RuleCodePrefix::T20),
Linter::Flake8PytestStyle => Prefixes::Single(RuleCodePrefix::PT),
Linter::Flake8Quotes => Prefixes::Single(RuleCodePrefix::Q),
Linter::Flake8Return => Prefixes::Single(RuleCodePrefix::RET),
Linter::Flake8Simplify => Prefixes::Single(RuleCodePrefix::SIM),
Linter::Flake8TidyImports => Prefixes::Single(RuleCodePrefix::TID),
Linter::Flake8UnusedArguments => Prefixes::Single(RuleCodePrefix::ARG),
Linter::Isort => Prefixes::Single(RuleCodePrefix::I),
Linter::McCabe => Prefixes::Single(RuleCodePrefix::C90),
Linter::PEP8Naming => Prefixes::Single(RuleCodePrefix::N),
Linter::PandasVet => Prefixes::Single(RuleCodePrefix::PD),
Linter::Pycodestyle => Prefixes::Multiple(vec![
(RuleCodePrefix::E, "Error"),
(RuleCodePrefix::W, "Warning"),
]),
RuleOrigin::Pydocstyle => Prefixes::Single(RuleCodePrefix::D),
RuleOrigin::Pyflakes => Prefixes::Single(RuleCodePrefix::F),
RuleOrigin::PygrepHooks => Prefixes::Single(RuleCodePrefix::PGH),
RuleOrigin::Pylint => Prefixes::Multiple(vec![
Linter::Pydocstyle => Prefixes::Single(RuleCodePrefix::D),
Linter::Pyflakes => Prefixes::Single(RuleCodePrefix::F),
Linter::PygrepHooks => Prefixes::Single(RuleCodePrefix::PGH),
Linter::Pylint => Prefixes::Multiple(vec![
(RuleCodePrefix::PLC, "Convention"),
(RuleCodePrefix::PLE, "Error"),
(RuleCodePrefix::PLR, "Refactor"),
(RuleCodePrefix::PLW, "Warning"),
]),
RuleOrigin::Pyupgrade => Prefixes::Single(RuleCodePrefix::UP),
RuleOrigin::Flake8Pie => Prefixes::Single(RuleCodePrefix::PIE),
RuleOrigin::Flake8Commas => Prefixes::Single(RuleCodePrefix::COM),
RuleOrigin::Flake8NoPep420 => Prefixes::Single(RuleCodePrefix::INP),
RuleOrigin::Flake8Executable => Prefixes::Single(RuleCodePrefix::EXE),
RuleOrigin::Ruff => Prefixes::Single(RuleCodePrefix::RUF),
Linter::Pyupgrade => Prefixes::Single(RuleCodePrefix::UP),
Linter::Flake8Pie => Prefixes::Single(RuleCodePrefix::PIE),
Linter::Flake8Commas => Prefixes::Single(RuleCodePrefix::COM),
Linter::Flake8NoPep420 => Prefixes::Single(RuleCodePrefix::INP),
Linter::Flake8Executable => Prefixes::Single(RuleCodePrefix::EXE),
Linter::Ruff => Prefixes::Single(RuleCodePrefix::RUF),
}
}
}