many-to-one 6/9: Implement ruff_macros::map_codes

This commit is contained in:
Martin Fischer 2023-02-03 03:18:30 +01:00 committed by Charlie Marsh
parent 9eda286dcd
commit c314e10e54
19 changed files with 505 additions and 271 deletions

View file

@ -4,10 +4,11 @@ use nohash_hasher::IntMap;
use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::codes::NoqaCode;
use crate::fix::Fix;
use crate::noqa;
use crate::noqa::{is_file_exempt, Directive};
use crate::registry::{Diagnostic, DiagnosticKind, NoqaCode, Rule};
use crate::registry::{Diagnostic, DiagnosticKind, Rule};
use crate::rule_redirects::get_redirect_target;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::{flags, Settings};

View file

@ -1,5 +1,6 @@
use crate::registry::{Linter, Rule};
#[ruff_macros::map_codes]
pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
#[allow(clippy::enum_glob_use)]
use Linter::*;

View file

@ -196,7 +196,8 @@ mod tests {
use anyhow::Result;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
use crate::registry::{Linter, RuleCodePrefix};
use crate::codes;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
@ -211,19 +212,25 @@ mod tests {
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![RuleCodePrefix::F401.into()];
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![RuleCodePrefix::F401.into()];
let expected = vec![codes::Pyflakes::_401.into()];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![RuleCodePrefix::F401.into(), RuleCodePrefix::E501.into()];
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![RuleCodePrefix::F401.into(), RuleCodePrefix::E501.into()];
let expected = vec![
codes::Pyflakes::_401.into(),
codes::Pycodestyle::E501.into(),
];
assert_eq!(actual, expected);
}
@ -276,11 +283,11 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: RuleCodePrefix::F841.into(),
prefix: codes::Pyflakes::_841.into(),
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: RuleCodePrefix::F841.into(),
prefix: codes::Pyflakes::_841.into(),
},
];
assert_eq!(actual, expected);
@ -334,47 +341,47 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: RuleCodePrefix::E402.into(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: RuleCodePrefix::F401.into(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: RuleCodePrefix::F401.into(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: RuleCodePrefix::E402.into(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: RuleCodePrefix::F401.into(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: RuleCodePrefix::F401.into(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: RuleCodePrefix::E402.into(),
prefix: codes::Pycodestyle::E402.into(),
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: RuleCodePrefix::F401.into(),
prefix: codes::Pyflakes::_401.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: RuleCodePrefix::F403.into(),
prefix: codes::Pyflakes::_403.into(),
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: RuleCodePrefix::F405.into(),
prefix: codes::Pyflakes::_405.into(),
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: RuleCodePrefix::E741.into(),
prefix: codes::Pycodestyle::E741.into(),
},
];
assert_eq!(actual, expected);

View file

@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize};
use strum_macros::{AsRefStr, EnumIter};
use crate::ast::types::Range;
use crate::codes::{self, RuleCodePrefix};
use crate::fix::Fix;
use crate::rules;
use crate::violation::Violation;
@ -723,14 +724,17 @@ impl Linter {
pub const fn upstream_categories(&self) -> Option<&'static [UpstreamCategory]> {
match self {
Linter::Pycodestyle => Some(&[
UpstreamCategory(RuleCodePrefix::E, "Error"),
UpstreamCategory(RuleCodePrefix::W, "Warning"),
UpstreamCategory(RuleCodePrefix::Pycodestyle(codes::Pycodestyle::E), "Error"),
UpstreamCategory(
RuleCodePrefix::Pycodestyle(codes::Pycodestyle::W),
"Warning",
),
]),
Linter::Pylint => Some(&[
UpstreamCategory(RuleCodePrefix::PLC, "Convention"),
UpstreamCategory(RuleCodePrefix::PLE, "Error"),
UpstreamCategory(RuleCodePrefix::PLR, "Refactor"),
UpstreamCategory(RuleCodePrefix::PLW, "Warning"),
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::C), "Convention"),
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::E), "Error"),
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::R), "Refactor"),
UpstreamCategory(RuleCodePrefix::Pylint(codes::Pylint::W), "Warning"),
]),
_ => None,
}

View file

@ -8,7 +8,8 @@ use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
use crate::registry::{Linter, Rule, RuleCodePrefix, RuleIter, RuleNamespace};
use crate::codes::RuleCodePrefix;
use crate::registry::{Linter, Rule, RuleIter, RuleNamespace};
use crate::rule_redirects::get_redirect;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -48,8 +49,8 @@ impl FromStr for RuleSelector {
}
Ok(Self::Prefix {
prefix: RuleCodePrefix::from_str(s)
.map_err(|_| ParseError::Unknown(s.to_string()))?,
prefix: RuleCodePrefix::parse(&linter, code)
.map_err(|_| ParseError::Unknown(code.to_string()))?,
redirected_from,
})
}
@ -69,9 +70,7 @@ impl RuleSelector {
match self {
RuleSelector::All => ("", "ALL"),
RuleSelector::Prefix { prefix, .. } => {
let prefix: &'static str = prefix.into();
let (linter, code) = Linter::parse_code(prefix).unwrap();
(linter.common_prefix(), code)
(prefix.linter().common_prefix(), prefix.short_code())
}
RuleSelector::Linter(l) => (l.common_prefix(), ""),
}
@ -182,7 +181,11 @@ impl JsonSchema for RuleSelector {
instance_type: Some(InstanceType::String.into()),
enum_values: Some(
std::iter::once("ALL".to_string())
.chain(RuleCodePrefix::iter().map(|s| s.as_ref().to_string()))
.chain(RuleCodePrefix::iter().map(|p| {
let prefix = p.linter().common_prefix();
let code = p.short_code();
format!("{prefix}{code}")
}))
.map(Value::String)
.collect(),
),
@ -196,7 +199,17 @@ impl RuleSelector {
match self {
RuleSelector::All => Specificity::All,
RuleSelector::Linter(..) => Specificity::Linter,
RuleSelector::Prefix { prefix, .. } => prefix.specificity(),
RuleSelector::Prefix { prefix, .. } => {
let prefix: &'static str = prefix.short_code();
match prefix.len() {
1 => Specificity::Code1Char,
2 => Specificity::Code2Chars,
3 => Specificity::Code3Chars,
4 => Specificity::Code4Chars,
5 => Specificity::Code5Chars,
_ => panic!("RuleSelector::specificity doesn't yet support codes with so many characters"),
}
}
}
}
}

View file

@ -13,7 +13,7 @@ mod tests {
use textwrap::dedent;
use crate::linter::{check_path, LinterResult};
use crate::registry::{Rule, RuleCodePrefix};
use crate::registry::{Linter, Rule};
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::test::test_path;
@ -21,7 +21,7 @@ mod tests {
fn rule_code(contents: &str, expected: &[Rule]) {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&RuleCodePrefix::PD);
let settings = settings::Settings::for_rules(&Linter::PandasVet);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);

View file

@ -15,7 +15,7 @@ mod tests {
use textwrap::dedent;
use crate::linter::{check_path, LinterResult};
use crate::registry::{Rule, RuleCodePrefix};
use crate::registry::{Linter, Rule};
use crate::settings::flags;
use crate::source_code::{Indexer, Locator, Stylist};
use crate::test::test_path;
@ -239,7 +239,7 @@ mod tests {
/// Note that all tests marked with `#[ignore]` should be considered TODOs.
fn flakes(contents: &str, expected: &[Rule]) {
let contents = dedent(contents);
let settings = settings::Settings::for_rules(&RuleCodePrefix::F);
let settings = settings::Settings::for_rules(&Linter::Pyflakes);
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);

View file

@ -6,7 +6,8 @@ use rustc_hash::FxHashSet;
use super::hashable::{HashableGlobSet, HashableHashSet};
use super::types::{FilePattern, PythonVersion};
use super::Settings;
use crate::registry::RuleCodePrefix;
use crate::codes::{self, RuleCodePrefix};
use crate::registry::Linter;
use crate::rule_selector::{prefix_to_selector, RuleSelector};
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_errmsg,
@ -16,8 +17,8 @@ use crate::rules::{
};
pub const PREFIXES: &[RuleSelector] = &[
prefix_to_selector(RuleCodePrefix::E),
prefix_to_selector(RuleCodePrefix::F),
prefix_to_selector(RuleCodePrefix::Pycodestyle(codes::Pycodestyle::E)),
RuleSelector::Linter(Linter::Pyflakes),
];
pub const TARGET_VERSION: PythonVersion = PythonVersion::Py310;

View file

@ -12,7 +12,7 @@ use strum::IntoEnumIterator;
use self::hashable::{HashableGlobMatcher, HashableGlobSet, HashableHashSet, HashableRegex};
use self::rule_table::RuleTable;
use crate::cache::cache_dir;
use crate::registry::{Rule, INCOMPATIBLE_CODES};
use crate::registry::{Rule, RuleNamespace, INCOMPATIBLE_CODES};
use crate::rule_selector::{RuleSelector, Specificity};
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_errmsg,
@ -360,7 +360,11 @@ impl From<&Configuration> for RuleTable {
for (from, target) in redirects {
// TODO(martin): This belongs into the ruff_cli crate.
crate::warn_user!("`{from}` has been remapped to `{}`.", target.as_ref());
crate::warn_user!(
"`{from}` has been remapped to `{}{}`.",
target.linter().common_prefix(),
target.short_code()
);
}
let mut rules = Self::empty();
@ -430,7 +434,8 @@ mod tests {
use rustc_hash::FxHashSet;
use super::configuration::RuleSelection;
use crate::registry::{Rule, RuleCodePrefix};
use crate::codes::{self, Pycodestyle};
use crate::registry::Rule;
use crate::settings::configuration::Configuration;
use crate::settings::rule_table::RuleTable;
@ -448,7 +453,7 @@ mod tests {
#[test]
fn rule_codes() {
let actual = resolve_rules([RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
select: Some(vec![codes::Pycodestyle::W.into()]),
..RuleSelection::default()
}]);
@ -460,31 +465,31 @@ mod tests {
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![RuleCodePrefix::W6.into()]),
select: Some(vec![Pycodestyle::W6.into()]),
..RuleSelection::default()
}]);
let expected = FxHashSet::from_iter([Rule::InvalidEscapeSequence]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
ignore: vec![RuleCodePrefix::W292.into()],
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![codes::Pycodestyle::W292.into()],
..RuleSelection::default()
}]);
let expected = FxHashSet::from_iter([Rule::DocLineTooLong, Rule::InvalidEscapeSequence]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![RuleCodePrefix::W292.into()]),
ignore: vec![RuleCodePrefix::W.into()],
select: Some(vec![Pycodestyle::W292.into()]),
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
}]);
let expected = FxHashSet::from_iter([Rule::NoNewLineAtEndOfFile]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![RuleCodePrefix::W605.into()]),
ignore: vec![RuleCodePrefix::W605.into()],
select: Some(vec![Pycodestyle::W605.into()]),
ignore: vec![Pycodestyle::W605.into()],
..RuleSelection::default()
}]);
let expected = FxHashSet::from_iter([]);
@ -492,12 +497,12 @@ mod tests {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
ignore: vec![RuleCodePrefix::W292.into()],
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![RuleCodePrefix::W292.into()],
extend_select: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
]);
@ -510,13 +515,13 @@ mod tests {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
ignore: vec![RuleCodePrefix::W292.into()],
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![RuleCodePrefix::W292.into()],
ignore: vec![RuleCodePrefix::W.into()],
extend_select: vec![Pycodestyle::W292.into()],
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
},
]);
@ -529,11 +534,11 @@ mod tests {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![RuleCodePrefix::W292.into()],
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
select: Some(vec![Pycodestyle::W.into()]),
..RuleSelection::default()
},
]);
@ -543,12 +548,12 @@ mod tests {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![RuleCodePrefix::W292.into()],
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![RuleCodePrefix::W.into()]),
ignore: vec![RuleCodePrefix::W505.into()],
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W505.into()],
..RuleSelection::default()
},
]);

View file

@ -123,7 +123,7 @@ mod tests {
use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::registry::RuleCodePrefix;
use crate::codes::{self, RuleCodePrefix};
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::banned_api::ApiBan;
use crate::rules::flake8_tidy_imports::relative_imports::Strictness;
@ -207,7 +207,7 @@ select = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
select: Some(vec![RuleCodePrefix::E501.into()]),
select: Some(vec![codes::Pycodestyle::E501.into()]),
..Options::default()
})
})
@ -225,8 +225,8 @@ ignore = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
extend_select: Some(vec![RuleCodePrefix::RUF100.into()]),
ignore: Some(vec![RuleCodePrefix::E501.into()]),
extend_select: Some(vec![codes::Ruff::_100.into()]),
ignore: Some(vec![codes::Pycodestyle::E501.into()]),
..Options::default()
})
})
@ -283,7 +283,7 @@ other-attribute = 1
external: Some(vec!["V101".to_string()]),
per_file_ignores: Some(FxHashMap::from_iter([(
"__init__.py".to_string(),
vec![RuleCodePrefix::F401.into()]
vec![RuleCodePrefix::Pyflakes(codes::Pyflakes::_401).into()]
)])),
flake8_bugbear: Some(flake8_bugbear::settings::Options {
extend_immutable_calls: Some(vec![

View file

@ -37,7 +37,7 @@ pub fn linter(format: HelpFormat) -> Result<()> {
.upstream_categories()
.unwrap()
.iter()
.map(|UpstreamCategory(prefix, ..)| prefix.as_ref())
.map(|UpstreamCategory(prefix, ..)| prefix.short_code())
.join("/"),
prefix => prefix.to_string(),
};
@ -52,8 +52,8 @@ pub fn linter(format: HelpFormat) -> Result<()> {
name: linter_info.name(),
categories: linter_info.upstream_categories().map(|cats| {
cats.iter()
.map(|UpstreamCategory(prefix, name, ..)| LinterCategoryInfo {
prefix: prefix.as_ref(),
.map(|UpstreamCategory(prefix, name)| LinterCategoryInfo {
prefix: prefix.short_code(),
name,
})
.collect()
@ -74,7 +74,7 @@ pub fn linter(format: HelpFormat) -> Result<()> {
.upstream_categories()
.unwrap()
.iter()
.map(|UpstreamCategory(prefix, ..)| prefix.as_ref())
.map(|UpstreamCategory(prefix, ..)| prefix.short_code())
.join("/"),
prefix => prefix.to_string(),
};

View file

@ -62,7 +62,7 @@ pub fn main(args: &Args) -> Result<()> {
.upstream_categories()
.unwrap()
.iter()
.map(|UpstreamCategory(prefix, ..)| prefix.as_ref())
.map(|UpstreamCategory(prefix, ..)| prefix.short_code())
.join(", "),
prefix => prefix.to_string(),
};
@ -104,7 +104,11 @@ pub fn main(args: &Args) -> Result<()> {
if let Some(categories) = linter.upstream_categories() {
for UpstreamCategory(prefix, name) in categories {
table_out.push_str(&format!("#### {name} ({})", prefix.as_ref()));
table_out.push_str(&format!(
"#### {name} ({}{})",
linter.common_prefix(),
prefix.short_code()
));
table_out.push('\n');
table_out.push('\n');
generate_table(&mut table_out, prefix);

View file

@ -1,10 +1,12 @@
//! This crate implements internal macros for the `ruff` library.
use proc_macro::TokenStream;
use syn::{parse_macro_input, DeriveInput, ItemFn};
mod config;
mod define_violation;
mod derive_message_formats;
mod map_codes;
mod register_rules;
mod rule_code_prefix;
mod rule_namespace;
@ -41,10 +43,15 @@ pub fn derive_rule_namespace(input: proc_macro::TokenStream) -> proc_macro::Toke
}
#[proc_macro_attribute]
pub fn derive_message_formats(
_attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
pub fn map_codes(_attr: TokenStream, item: TokenStream) -> TokenStream {
let func = parse_macro_input!(item as ItemFn);
map_codes::map_codes(&func)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
#[proc_macro_attribute]
pub fn derive_message_formats(_attr: TokenStream, item: TokenStream) -> TokenStream {
let func = parse_macro_input!(item as ItemFn);
derive_message_formats::derive_message_formats(&func).into()
}

View file

@ -0,0 +1,307 @@
use std::collections::{BTreeMap, HashMap};
use itertools::Itertools;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::{
parenthesized, parse::Parse, spanned::Spanned, Attribute, Error, Expr, ExprCall, ExprMatch,
Ident, ItemFn, LitStr, Pat, Path, Stmt, Token,
};
use crate::rule_code_prefix::{get_prefix_ident, if_all_same};
/// Expands `#[ruff_macros::map_codes]` on the `code_to_rule` function.
///
/// The annotated function's body must end in `Some(match (..) { .. })`, where
/// every match arm (except a trailing wildcard) has the shape
/// `(Linter, "CODE") => Rule::...,`. From that single source of truth the
/// macro generates:
///
/// * the `RuleCodePrefix` enum (one variant per linter) with `linter()` and
///   `short_code()` accessors,
/// * a per-linter code-prefix enum (via `rule_code_prefix::expand`) plus
///   `From` conversions into `RuleCodePrefix` / `RuleSelector`,
/// * `IntoIterator` impls that expand a prefix or a `Linter` into its rules,
/// * `RuleCodePrefix::parse`, `Rule::from_code`, `Rule::noqa_code`, and the
///   `NoqaCode` type.
pub fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
    // The function body must end in an expression we can inspect.
    let Some(last_stmt) = func.block.stmts.last() else {
        return Err(Error::new(func.block.span(), "expected body to end in an expression"));
    };
    // That expression must be a call — expected to be `Some(..)`.
    let Stmt::Expr(Expr::Call(ExprCall{args: some_args, ..})) = last_stmt else {
        return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })"))
    };
    let mut some_args = some_args.into_iter();
    // The call must have exactly one argument, and it must be a `match`.
    let (Some(Expr::Match(ExprMatch { arms, .. })), None) = (some_args.next(), some_args.next()) else {
        return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })"))
    };

    // linter ident => short code => (rule path, arm attributes),
    // collected from the match arms.
    let mut linters: BTreeMap<Ident, BTreeMap<String, (Path, Vec<Attribute>)>> = BTreeMap::new();
    for arm in arms {
        // A wildcard arm (`_ => ...`) terminates the mapping.
        if matches!(arm.pat, Pat::Wild(..)) {
            break;
        }
        let entry = syn::parse::<Entry>(arm.into_token_stream().into())?;
        linters
            .entry(entry.linter)
            .or_default()
            .insert(entry.code.value(), (entry.rule, entry.attrs));
    }

    let linter_idents: Vec<_> = linters.keys().collect();

    // The `RuleCodePrefix` enum: one variant per linter, each wrapping that
    // linter's own prefix enum (generated per linter below).
    let mut out = quote! {
        #[derive(Debug, Clone, PartialEq, Eq, Hash)]
        pub enum RuleCodePrefix {
            #(#linter_idents(#linter_idents),)*
        }

        impl RuleCodePrefix {
            pub fn linter(&self) -> &'static Linter {
                match self {
                    #(Self::#linter_idents(..) => &crate::registry::Linter::#linter_idents,)*
                }
            }

            pub fn short_code(&self) -> &'static str {
                match self {
                    #(Self::#linter_idents(code) => code.into(),)*
                }
            }
        }
    };

    for (linter, map) in &linters {
        // Generate the per-linter prefix enum (e.g. `Pycodestyle::E1`).
        out.extend(super::rule_code_prefix::expand(
            linter,
            map.iter().map(|(k, v)| (k.as_str(), &v.1)),
        ));

        out.extend(quote! {
            impl From<#linter> for RuleCodePrefix {
                fn from(linter: #linter) -> Self {
                    Self::#linter(linter)
                }
            }

            impl From<#linter> for crate::rule_selector::RuleSelector {
                fn from(linter: #linter) -> Self {
                    Self::Prefix{prefix: RuleCodePrefix::#linter(linter), redirected_from: None}
                }
            }
        });
    }

    let mut all_codes = Vec::new();

    for (linter, map) in &linters {
        // Extend the exact-code map with every non-empty prefix of every
        // code, each prefix mapped to all the rules it selects.
        let mut full_map: HashMap<_, _> = map
            .iter()
            .map(|(code, rule)| (code.clone(), vec![rule.clone()]))
            .collect();
        for code in map.keys() {
            for i in 1..=code.len() {
                let prefix = code[..i].to_string();
                let rules: Vec<_> = map
                    .iter()
                    .filter_map(|(code, rules)| {
                        if code.starts_with(&prefix) {
                            Some(rules)
                        } else {
                            None
                        }
                    })
                    .cloned()
                    .collect();
                full_map.insert(prefix, rules);
            }
        }

        for (code, names) in &full_map {
            let prefix_ident = get_prefix_ident(code);
            // Only hoist an attribute (e.g. `#[cfg(...)]`) onto the prefix
            // when every rule behind the prefix carries the same attributes.
            let attr = match if_all_same(names.iter().map(|(_, attrs)| attrs)) {
                Some(attr) => quote!(#(#attr)*),
                None => quote!(),
            };
            all_codes.push(quote! {
                #attr Self::#linter(#linter::#prefix_ident)
            });
        }

        let mut prefix_into_iter_match_arms = quote!();
        for (code, rules) in full_map {
            let rule_paths = rules.iter().map(|(path, attrs)| quote!(#(#attrs)* #path));
            let prefix_ident = get_prefix_ident(&code);
            let attr = match if_all_same(rules.iter().map(|(_, attrs)| attrs)) {
                Some(attr) => quote!(#(#attr)*),
                None => quote!(),
            };
            prefix_into_iter_match_arms.extend(quote! {
                #attr #linter::#prefix_ident => vec![#(#rule_paths,)*].into_iter(),
            });
        }

        // Iterating a per-linter prefix yields all rules under that prefix.
        out.extend(quote! {
            impl IntoIterator for &#linter {
                type Item = Rule;
                type IntoIter = ::std::vec::IntoIter<Self::Item>;

                fn into_iter(self) -> Self::IntoIter {
                    match self { #prefix_into_iter_match_arms }
                }
            }
        });
    }

    // Iterating a `RuleCodePrefix` defers to the wrapped per-linter prefix.
    out.extend(quote! {
        impl IntoIterator for &RuleCodePrefix {
            type Item = Rule;
            type IntoIter = ::std::vec::IntoIter<Self::Item>;

            fn into_iter(self) -> Self::IntoIter {
                match self {
                    #(RuleCodePrefix::#linter_idents(prefix) => prefix.into_iter(),)*
                }
            }
        }
    });

    // Parse a short code (the part after the linter's common prefix, e.g.
    // "501" for pycodestyle's "E501") in the context of a known linter.
    out.extend(quote! {
        impl RuleCodePrefix {
            pub fn parse(linter: &Linter, code: &str) -> Result<Self, FromCodeError> {
                use std::str::FromStr;

                Ok(match linter {
                    #(Linter::#linter_idents => RuleCodePrefix::#linter_idents(#linter_idents::from_str(code).map_err(|_| FromCodeError::Unknown)?),)*
                })
            }
        }
    });

    // `Rule::from_code`: split a full code into linter + short code, then
    // resolve the (exact-match) prefix to its single rule.
    out.extend(quote! {
        impl crate::registry::Rule {
            pub fn from_code(code: &str) -> Result<Self, FromCodeError> {
                use crate::registry::RuleNamespace;
                let (linter, code) = Linter::parse_code(code).ok_or(FromCodeError::Unknown)?;
                let prefix: RuleCodePrefix = RuleCodePrefix::parse(&linter, code)?;
                Ok(prefix.into_iter().next().unwrap())
            }
        }
    });

    // Invert the mapping: rule path => every (linter, code, attrs) triple
    // that selects it (a rule may be reachable through several codes).
    #[allow(clippy::type_complexity)]
    let mut rule_to_codes: HashMap<&Path, Vec<(&Ident, &String, &Vec<Attribute>)>> = HashMap::new();

    for (linter, map) in &linters {
        for (code, (rule, attrs)) in map {
            rule_to_codes
                .entry(rule)
                .or_default()
                .push((linter, code, attrs));
        }
    }

    let mut rule_noqa_code_match_arms = quote!();

    for (rule, codes) in rule_to_codes {
        // When a rule has several codes, prefer a non-Pylint one for the
        // `noqa` representation (sorting `false` before `true`).
        let (linter, code, attrs) = codes
            .iter()
            .sorted_by_key(|(l, ..)| *l == "Pylint") // TODO: more sophisticated sorting
            .next()
            .unwrap();
        rule_noqa_code_match_arms.extend(quote! {
            #(#attrs)* #rule => NoqaCode(crate::registry::Linter::#linter.common_prefix(), #code),
        });
    }

    out.extend(quote! {
        impl crate::registry::Rule {
            pub fn noqa_code(&self) -> NoqaCode {
                use crate::registry::RuleNamespace;

                match self {
                    #rule_noqa_code_match_arms
                    // TODO: support rules without codes
                    // rule => rule.as_ref()
                }
            }
        }

        // A rule code as written in `# noqa` comments:
        // linter common prefix + short code.
        #[derive(PartialEq, Eq, PartialOrd, Ord)]
        pub struct NoqaCode(&'static str, &'static str);

        impl std::fmt::Display for NoqaCode {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
                use std::fmt::Write;
                write!(f, "{}{}", self.0, self.1)
            }
        }

        impl PartialEq<&str> for NoqaCode {
            fn eq(&self, other: &&str) -> bool {
                match other.strip_prefix(self.0) {
                    Some(suffix) => suffix == self.1,
                    None => false
                }
            }
        }
    });

    let mut linter_into_iter_match_arms = quote!();
    for (linter, map) in &linters {
        let rule_paths = map.values().map(|(path, attrs)| quote!(#(#attrs)* #path));
        linter_into_iter_match_arms.extend(quote! {
            crate::registry::Linter::#linter => vec![#(#rule_paths,)*].into_iter(),
        });
    }

    // Iterating a `Linter` yields every rule registered under it.
    out.extend(quote! {
        impl IntoIterator for &crate::registry::Linter {
            type Item = Rule;
            type IntoIter = ::std::vec::IntoIter<Self::Item>;

            fn into_iter(self) -> Self::IntoIter {
                match self {
                    #linter_into_iter_match_arms
                }
            }
        }
    });

    out.extend(quote! {
        impl RuleCodePrefix {
            pub fn iter() -> ::std::vec::IntoIter<RuleCodePrefix> {
                vec![ #(#all_codes,)* ].into_iter()
            }
        }

        #[derive(thiserror::Error, Debug)]
        pub enum FromCodeError {
            #[error("unknown rule code")]
            Unknown,
        }
    });

    Ok(out)
}
/// One parsed match arm of the `code_to_rule` mapping:
/// `(Linter, "CODE") => Rule::...,` together with any outer attributes
/// (e.g. `#[cfg(...)]`) attached to the arm.
struct Entry {
    /// The linter variant identifier, e.g. `Pycodestyle`.
    linter: Ident,
    /// The code string literal, e.g. `"E101"`.
    code: LitStr,
    /// The path of the mapped rule, e.g. `Rule::MixedSpacesAndTabs`.
    rule: Path,
    /// Outer attributes attached to the match arm.
    attrs: Vec<Attribute>,
}
impl Parse for Entry {
    /// Parses a match arm like:
    ///
    /// (Pycodestyle, "E101") => Rule::MixedSpacesAndTabs,
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        // Outer attributes on the arm, e.g. `#[cfg(feature = "...")]`.
        let attrs = Attribute::parse_outer(input)?;
        // The parenthesized `(Linter, "CODE")` tuple pattern.
        let pat_tuple;
        parenthesized!(pat_tuple in input);
        let linter: Ident = pat_tuple.parse()?;
        let _: Token!(,) = pat_tuple.parse()?;
        let code: LitStr = pat_tuple.parse()?;
        // The `=> Rule::...,` arm body; the trailing comma is required.
        let _: Token!(=>) = input.parse()?;
        let rule: Path = input.parse()?;
        let _: Token!(,) = input.parse()?;
        Ok(Entry {
            linter,
            code,
            rule,
            attrs,
        })
    }
}

View file

@ -1,9 +1,6 @@
use std::collections::HashMap;
use proc_macro2::Span;
use quote::quote;
use syn::parse::Parse;
use syn::{Attribute, Ident, LitStr, Path, Token};
use syn::{Attribute, Ident, Path, Token};
pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
let mut rule_variants = quote!();
@ -11,18 +8,14 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
let mut rule_message_formats_match_arms = quote!();
let mut rule_autofixable_match_arms = quote!();
let mut rule_explanation_match_arms = quote!();
let mut rule_code_match_arms = quote!();
let mut rule_from_code_match_arms = quote!();
let mut diagnostic_kind_code_match_arms = quote!();
let mut diagnostic_kind_body_match_arms = quote!();
let mut diagnostic_kind_fixable_match_arms = quote!();
let mut diagnostic_kind_commit_match_arms = quote!();
let mut from_impls_for_diagnostic_kind = quote!();
for (code, path, name, attr) in &input.entries {
let code_str = LitStr::new(&code.to_string(), Span::call_site());
for (path, name, attr) in &input.entries {
rule_variants.extend(quote! {
#[doc = #code_str]
#(#attr)*
#name,
});
@ -34,8 +27,6 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
rule_autofixable_match_arms
.extend(quote! {#(#attr)* Self::#name => <#path as Violation>::AUTOFIX,});
rule_explanation_match_arms.extend(quote! {#(#attr)* Self::#name => #path::explanation(),});
rule_code_match_arms.extend(quote! {#(#attr)* Self::#name => NoqaCode(#code_str),});
rule_from_code_match_arms.extend(quote! {#(#attr)* #code_str => Ok(&Rule::#name), });
diagnostic_kind_code_match_arms
.extend(quote! {#(#attr)* Self::#name(..) => &Rule::#name, });
diagnostic_kind_body_match_arms
@ -55,19 +46,6 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
});
}
let code_to_name: HashMap<_, _> = input
.entries
.iter()
.map(|(code, _, name, _)| (code.to_string(), name))
.collect();
let rule_code_prefix = super::rule_code_prefix::expand(
&Ident::new("Rule", Span::call_site()),
&Ident::new("RuleCodePrefix", Span::call_site()),
input.entries.iter().map(|(code, .., attr)| (code, attr)),
|code| code_to_name[code],
);
quote! {
#[derive(
EnumIter,
@ -86,11 +64,6 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
#[derive(AsRefStr, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticKind { #diagnostic_kind_variants }
#[derive(thiserror::Error, Debug)]
pub enum FromCodeError {
#[error("unknown rule code")]
Unknown,
}
impl Rule {
/// Returns the format strings used to report violations of this rule.
@ -105,32 +78,6 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
pub fn autofixable(&self) -> Option<crate::violation::AutofixKind> {
match self { #rule_autofixable_match_arms }
}
pub fn noqa_code(&self) -> NoqaCode {
match self { #rule_code_match_arms }
}
pub fn from_code(code: &str) -> Result<Self, FromCodeError> {
match code {
#rule_from_code_match_arms
_ => Err(FromCodeError::Unknown),
}
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str);
impl std::fmt::Display for NoqaCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
self.0.fmt(f)
}
}
impl PartialEq<&str> for NoqaCode {
fn eq(&self, other: &&str) -> bool {
self.0 == *other
}
}
impl DiagnosticKind {
@ -156,13 +103,11 @@ pub fn register_rules(input: &Input) -> proc_macro2::TokenStream {
}
#from_impls_for_diagnostic_kind
#rule_code_prefix
}
}
pub struct Input {
entries: Vec<(Ident, Path, Ident, Vec<Attribute>)>,
entries: Vec<(Path, Ident, Vec<Attribute>)>,
}
impl Parse for Input {
@ -173,12 +118,12 @@ impl Parse for Input {
let attrs = input.call(Attribute::parse_outer)?;
// Parse the `RuleCodePrefix::... => ...` part.
let code: Ident = input.parse()?;
let _code: Ident = input.parse()?;
let _: Token![=>] = input.parse()?;
let path: Path = input.parse()?;
let name = path.segments.last().unwrap().ident.clone();
let _: Token![,] = input.parse()?;
entries.push((code, path, name, attrs));
entries.push((path, name, attrs));
}
Ok(Self { entries })
}

View file

@ -4,63 +4,55 @@ use proc_macro2::Span;
use quote::quote;
use syn::{Attribute, Ident};
/// Returns a valid Rust identifier for the given rule-code prefix.
///
/// Rust identifiers may not begin with a digit, so purely numeric or
/// digit-leading prefixes (e.g. `"401"`) are rendered with a leading
/// underscore (`_401`); all other prefixes are used verbatim.
///
/// # Panics
/// Panics (inside `Ident::new`) if the resulting string is not a valid
/// identifier — e.g. when `prefix` is empty. The previous byte-indexing
/// form (`prefix.as_bytes()[0]`) panicked earlier with an opaque
/// index-out-of-bounds on empty input; `starts_with` avoids that path.
pub fn get_prefix_ident(prefix: &str) -> Ident {
    let prefix = if prefix.starts_with(|c: char| c.is_ascii_digit()) {
        // Identifiers in Rust may not start with a number.
        format!("_{prefix}")
    } else {
        prefix.to_string()
    };
    Ident::new(&prefix, Span::call_site())
}
pub fn expand<'a>(
rule_type: &Ident,
prefix_ident: &Ident,
variants: impl Iterator<Item = (&'a Ident, &'a Vec<Attribute>)>,
variant_name: impl Fn(&str) -> &'a Ident,
variants: impl Iterator<Item = (&'a str, &'a Vec<Attribute>)>,
) -> proc_macro2::TokenStream {
// Build up a map from prefix to matching RuleCodes.
let mut prefix_to_codes: BTreeMap<String, BTreeSet<String>> = BTreeMap::default();
let mut attributes: BTreeMap<String, &[Attribute]> = BTreeMap::default();
let mut pl_codes = BTreeSet::new();
let mut code_to_attributes: BTreeMap<String, &[Attribute]> = BTreeMap::default();
for (variant, attr) in variants {
let code_str = variant.to_string();
let code_prefix_len = code_str
.chars()
.take_while(|char| char.is_alphabetic())
.count();
let code_suffix_len = code_str.len() - code_prefix_len;
for i in 0..=code_suffix_len {
let prefix = code_str[..code_prefix_len + i].to_string();
for i in 1..=code_str.len() {
let prefix = code_str[..i].to_string();
prefix_to_codes
.entry(prefix)
.or_default()
.insert(code_str.clone());
}
if code_str.starts_with("PL") {
pl_codes.insert(code_str.clone());
}
attributes.insert(code_str, attr);
code_to_attributes.insert(code_str, attr);
}
prefix_to_codes.insert("PL".to_string(), pl_codes);
let variant_strs: Vec<_> = prefix_to_codes.keys().collect();
let variant_idents: Vec<_> = prefix_to_codes
.keys()
.map(|prefix| {
let ident = get_prefix_ident(prefix);
quote! {
#ident
}
})
.collect();
let prefix_variants = prefix_to_codes.iter().map(|(prefix, codes)| {
let prefix = Ident::new(prefix, Span::call_site());
let attrs = attributes_for_prefix(codes, &attributes);
quote! {
#attrs
#prefix
}
});
let prefix_impl = generate_impls(
rule_type,
prefix_ident,
&prefix_to_codes,
variant_name,
&attributes,
);
let attributes: Vec<_> = prefix_to_codes
.values()
.map(|codes| attributes_for_prefix(codes, &code_to_attributes))
.collect();
quote! {
#[derive(
::strum_macros::EnumIter,
::strum_macros::EnumString,
::strum_macros::AsRefStr,
::strum_macros::IntoStaticStr,
Debug,
PartialEq,
Eq,
@ -68,83 +60,34 @@ pub fn expand<'a>(
Ord,
Clone,
Hash,
::serde::Serialize,
::serde::Deserialize,
)]
pub enum #prefix_ident {
#(#prefix_variants,)*
#(#attributes #variant_idents,)*
}
#prefix_impl
}
}
impl std::str::FromStr for #prefix_ident {
type Err = FromCodeError;
fn generate_impls<'a>(
rule_type: &Ident,
prefix_ident: &Ident,
prefix_to_codes: &BTreeMap<String, BTreeSet<String>>,
variant_name: impl Fn(&str) -> &'a Ident,
attributes: &BTreeMap<String, &[Attribute]>,
) -> proc_macro2::TokenStream {
let into_iter_match_arms = prefix_to_codes.iter().map(|(prefix_str, codes)| {
let prefix = Ident::new(prefix_str, Span::call_site());
let attrs = attributes_for_prefix(codes, attributes);
let codes = codes.iter().map(|code| {
let rule_variant = variant_name(code);
let attrs = attributes[code];
quote! {
#(#attrs)*
#rule_type::#rule_variant
}
});
quote! {
#attrs
#prefix_ident::#prefix => vec![#(#codes),*].into_iter(),
}
});
let specificity_match_arms = prefix_to_codes.iter().map(|(prefix_str, codes)| {
let prefix = Ident::new(prefix_str, Span::call_site());
let mut num_numeric = prefix_str.chars().filter(|char| char.is_numeric()).count();
if prefix_str != "PL" && prefix_str.starts_with("PL") {
num_numeric += 1;
}
let suffix_len = match num_numeric {
0 => quote! { Specificity::Linter },
1 => quote! { Specificity::Code1Char },
2 => quote! { Specificity::Code2Chars },
3 => quote! { Specificity::Code3Chars },
4 => quote! { Specificity::Code4Chars },
5 => quote! { Specificity::Code5Chars },
_ => panic!("Invalid prefix: {prefix}"),
};
let attrs = attributes_for_prefix(codes, attributes);
quote! {
#attrs
#prefix_ident::#prefix => #suffix_len,
}
});
quote! {
impl #prefix_ident {
pub(crate) fn specificity(&self) -> crate::rule_selector::Specificity {
use crate::rule_selector::Specificity;
#[allow(clippy::match_same_arms)]
match self {
#(#specificity_match_arms)*
fn from_str(code: &str) -> Result<Self, Self::Err> {
match code {
#(#attributes #variant_strs => Ok(Self::#variant_idents),)*
_ => Err(FromCodeError::Unknown)
}
}
}
impl IntoIterator for &#prefix_ident {
type Item = #rule_type;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
impl From<&#prefix_ident> for &'static str {
fn from(code: &#prefix_ident) -> Self {
match code {
#(#attributes #prefix_ident::#variant_idents => #variant_strs,)*
}
}
}
fn into_iter(self) -> Self::IntoIter {
#[allow(clippy::match_same_arms)]
impl AsRef<str> for #prefix_ident {
fn as_ref(&self) -> &str {
match self {
#(#into_iter_match_arms)*
#(#attributes Self::#variant_idents => #variant_strs,)*
}
}
}
@ -163,7 +106,7 @@ fn attributes_for_prefix(
/// If all values in an iterator are the same, return that value. Otherwise,
/// return `None`.
fn if_all_same<T: PartialEq>(iter: impl Iterator<Item = T>) -> Option<T> {
pub fn if_all_same<T: PartialEq>(iter: impl Iterator<Item = T>) -> Option<T> {
let mut iter = iter.peekable();
let first = iter.next()?;
if iter.all(|x| x == first) {

View file

@ -1,7 +1,6 @@
use std::cmp::Reverse;
use std::collections::HashSet;
use proc_macro2::{Ident, Span};
use quote::quote;
use syn::spanned::Spanned;
use syn::{Attribute, Data, DataEnum, DeriveInput, Error, Lit, Meta, MetaNameValue};
@ -18,7 +17,6 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
let mut common_prefix_match_arms = quote!();
let mut name_match_arms = quote!(Self::Ruff => "Ruff-specific rules",);
let mut url_match_arms = quote!(Self::Ruff => None,);
let mut into_iter_match_arms = quote!();
let mut all_prefixes = HashSet::new();
@ -79,11 +77,6 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
if let [prefix] = &prefixes[..] {
common_prefix_match_arms.extend(quote! { Self::#variant_ident => #prefix, });
let prefix_ident = Ident::new(prefix, Span::call_site());
into_iter_match_arms.extend(quote! {
#ident::#variant_ident => RuleCodePrefix::#prefix_ident.into_iter(),
});
} else {
// There is more than one prefix. We already previously asserted
// that prefixes of the same variant don't start with the same character
@ -106,13 +99,6 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
}});
}
into_iter_match_arms.extend(quote! {
#ident::Pycodestyle => {
let rules: Vec<_> = (&RuleCodePrefix::E).into_iter().chain(&RuleCodePrefix::W).collect();
rules.into_iter()
}
});
Ok(quote! {
impl crate::registry::RuleNamespace for #ident {
fn parse_code(code: &str) -> Option<(Self, &str)> {
@ -132,19 +118,6 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
match self { #url_match_arms }
}
}
impl IntoIterator for &#ident {
type Item = Rule;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
use colored::Colorize;
match self {
#into_iter_match_arms
}
}
}
})
}