refactor: Introduce crates folder (#2088)

This PR introduces a new `crates` directory and moves all "product" crates into that folder. 

Part of #2059.
Micha Reiser 2023-02-05 22:47:48 +01:00 committed by GitHub
parent e3dfa2e04e
commit cd8be8c0be
1785 changed files with 314 additions and 298 deletions


@@ -0,0 +1,203 @@
use quote::{quote, quote_spanned};
use syn::parse::{Parse, ParseStream};
use syn::spanned::Spanned;
use syn::token::Comma;
use syn::{
AngleBracketedGenericArguments, Attribute, Data, DataStruct, DeriveInput, Field, Fields, Lit,
LitStr, Path, PathArguments, PathSegment, Token, Type, TypePath,
};
pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
let DeriveInput { ident, data, .. } = input;
match data {
Data::Struct(DataStruct {
fields: Fields::Named(fields),
..
}) => {
let mut output = vec![];
for field in fields.named.iter() {
let docs: Vec<&Attribute> = field
.attrs
.iter()
.filter(|attr| attr.path.is_ident("doc"))
.collect();
if docs.is_empty() {
return Err(syn::Error::new(
field.span(),
"Missing documentation for field",
));
}
if let Some(attr) = field.attrs.iter().find(|attr| attr.path.is_ident("option")) {
output.push(handle_option(field, attr, docs)?);
};
if field
.attrs
.iter()
.any(|attr| attr.path.is_ident("option_group"))
{
output.push(handle_option_group(field)?);
};
}
Ok(quote! {
use crate::settings::options_base::{OptionEntry, OptionField, OptionGroup, ConfigurationOptions};
#[automatically_derived]
impl ConfigurationOptions for #ident {
fn get_available_options() -> Vec<OptionEntry> {
vec![#(#output),*]
}
}
})
}
_ => Err(syn::Error::new(
ident.span(),
"Can only derive ConfigurationOptions from structs with named fields.",
)),
}
}
/// For a field with type `Option<Foobar>` where `Foobar` itself is a struct
/// deriving `ConfigurationOptions`, generate code that retrieves the options
/// from that group via `Foobar::get_available_options()`.
fn handle_option_group(field: &Field) -> syn::Result<proc_macro2::TokenStream> {
let ident = field
.ident
.as_ref()
.expect("Expected to handle named fields");
match &field.ty {
Type::Path(TypePath {
path: Path { segments, .. },
..
}) => match segments.first() {
Some(PathSegment {
ident: type_ident,
arguments:
PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }),
..
}) if type_ident == "Option" => {
let path = &args[0];
let kebab_name = LitStr::new(&ident.to_string().replace('_', "-"), ident.span());
Ok(quote_spanned!(
ident.span() => OptionEntry::Group(OptionGroup {
name: #kebab_name,
fields: #path::get_available_options(),
})
))
}
_ => Err(syn::Error::new(
ident.span(),
"Expected `Option<_>` as type.",
)),
},
_ => Err(syn::Error::new(ident.span(), "Expected type.")),
}
}
/// Parse a `doc` attribute into its string literal value.
fn parse_doc(doc: &Attribute) -> syn::Result<String> {
let doc = doc
.parse_meta()
.map_err(|e| syn::Error::new(doc.span(), e))?;
match doc {
syn::Meta::NameValue(syn::MetaNameValue {
lit: Lit::Str(lit_str),
..
}) => Ok(lit_str.value()),
_ => Err(syn::Error::new(doc.span(), "Expected doc attribute.")),
}
}
/// Parse an `#[option(default = "...", value_type = "...", example = "...")]`
/// attribute and return the data in the form of an `OptionField`. The field's
/// documentation comes from its `///` doc comments rather than the attribute.
fn handle_option(
field: &Field,
attr: &Attribute,
docs: Vec<&Attribute>,
) -> syn::Result<proc_macro2::TokenStream> {
// Convert the list of `doc` attributes into a single string.
let doc = textwrap::dedent(
&docs
.into_iter()
.map(parse_doc)
.collect::<syn::Result<Vec<_>>>()?
.join("\n"),
)
.trim_matches('\n')
.to_string();
let ident = field
.ident
.as_ref()
.expect("Expected to handle named fields");
let FieldAttributes {
default,
value_type,
example,
..
} = attr.parse_args::<FieldAttributes>()?;
let kebab_name = LitStr::new(&ident.to_string().replace('_', "-"), ident.span());
Ok(quote_spanned!(
ident.span() => OptionEntry::Field(OptionField {
name: #kebab_name,
doc: &#doc,
default: &#default,
value_type: &#value_type,
example: &#example,
})
))
}
#[derive(Debug)]
struct FieldAttributes {
default: String,
value_type: String,
example: String,
}
impl Parse for FieldAttributes {
fn parse(input: ParseStream) -> syn::Result<Self> {
let default = _parse_key_value(input, "default")?;
input.parse::<Comma>()?;
let value_type = _parse_key_value(input, "value_type")?;
input.parse::<Comma>()?;
let example = _parse_key_value(input, "example")?;
if !input.is_empty() {
input.parse::<Comma>()?;
}
Ok(Self {
default,
value_type,
example: textwrap::dedent(&example).trim_matches('\n').to_string(),
})
}
}
fn _parse_key_value(input: ParseStream, name: &str) -> syn::Result<String> {
let ident: proc_macro2::Ident = input.parse()?;
if ident != name {
return Err(syn::Error::new(
ident.span(),
format!("Expected `{name}` name"),
));
}
input.parse::<Token![=]>()?;
let value: Lit = input.parse()?;
match &value {
Lit::Str(v) => Ok(v.value()),
_ => Err(syn::Error::new(value.span(), "Expected literal string")),
}
}
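
For orientation, a minimal sketch of the input this derive expects — the struct and field names below are illustrative and not part of this diff. Every field needs `///` doc comments; leaf options carry an `#[option(...)]` attribute with `default`, `value_type`, and `example` in that order; and `#[option_group]` fields must be `Option<T>` where `T` also derives `ConfigurationOptions`:

```rust
// Illustrative consumer of the derive above; all names are made up.
#[derive(ConfigurationOptions)]
struct Options {
    /// The maximum allowed line length.
    #[option(default = "88", value_type = "usize", example = "line-length = 120")]
    line_length: Option<usize>,

    /// Settings for a nested plugin section.
    #[option_group]
    plugin: Option<PluginOptions>, // `PluginOptions` must also derive `ConfigurationOptions`.
}
```

The generated `get_available_options()` then returns an `OptionEntry::Field` named `line-length` (field names are kebab-cased) and an `OptionEntry::Group` named `plugin` backed by `PluginOptions::get_available_options()`.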


@@ -0,0 +1,164 @@
use std::collections::HashMap;
use proc_macro2::Span;
use quote::quote;
use syn::parse::Parse;
use syn::{Attribute, Ident, LitStr, Path, Token};
pub fn define_rule_mapping(mapping: &Mapping) -> proc_macro2::TokenStream {
let mut rule_variants = quote!();
let mut diagkind_variants = quote!();
let mut rule_message_formats_match_arms = quote!();
let mut rule_autofixable_match_arms = quote!();
let mut rule_code_match_arms = quote!();
let mut rule_from_code_match_arms = quote!();
let mut diagkind_code_match_arms = quote!();
let mut diagkind_body_match_arms = quote!();
let mut diagkind_fixable_match_arms = quote!();
let mut diagkind_commit_match_arms = quote!();
let mut from_impls_for_diagkind = quote!();
for (code, path, name, attr) in &mapping.entries {
let code_str = LitStr::new(&code.to_string(), Span::call_site());
rule_variants.extend(quote! {
#[doc = #code_str]
#(#attr)*
#name,
});
diagkind_variants.extend(quote! {#(#attr)* #name(#path),});
// Apply the `attrs` to each arm, e.g. `#[cfg(feature = "foo")]`.
rule_message_formats_match_arms
.extend(quote! {#(#attr)* Self::#name => <#path as Violation>::message_formats(),});
rule_autofixable_match_arms
.extend(quote! {#(#attr)* Self::#name => <#path as Violation>::AUTOFIX,});
rule_code_match_arms.extend(quote! {#(#attr)* Self::#name => #code_str,});
rule_from_code_match_arms.extend(quote! {#(#attr)* #code_str => Ok(Rule::#name), });
diagkind_code_match_arms.extend(quote! {#(#attr)* Self::#name(..) => &Rule::#name, });
diagkind_body_match_arms
.extend(quote! {#(#attr)* Self::#name(x) => Violation::message(x), });
diagkind_fixable_match_arms
.extend(quote! {#(#attr)* Self::#name(x) => x.autofix_title_formatter().is_some(),});
diagkind_commit_match_arms.extend(
quote! {#(#attr)* Self::#name(x) => x.autofix_title_formatter().map(|f| f(x)), },
);
from_impls_for_diagkind.extend(quote! {
#(#attr)*
impl From<#path> for DiagnosticKind {
fn from(x: #path) -> Self {
DiagnosticKind::#name(x)
}
}
});
}
let code_to_name: HashMap<_, _> = mapping
.entries
.iter()
.map(|(code, _, name, _)| (code.to_string(), name))
.collect();
let rulecodeprefix = super::rule_code_prefix::expand(
&Ident::new("Rule", Span::call_site()),
&Ident::new("RuleCodePrefix", Span::call_site()),
mapping.entries.iter().map(|(code, ..)| code),
|code| code_to_name[code],
mapping.entries.iter().map(|(.., attr)| attr),
);
quote! {
#[derive(
EnumIter,
Debug,
PartialEq,
Eq,
Clone,
Hash,
PartialOrd,
Ord,
AsRefStr,
)]
#[strum(serialize_all = "kebab-case")]
pub enum Rule { #rule_variants }
#[derive(AsRefStr, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticKind { #diagkind_variants }
#[derive(thiserror::Error, Debug)]
pub enum FromCodeError {
#[error("unknown rule code")]
Unknown,
}
impl Rule {
/// Returns the format strings used to report violations of this rule.
pub fn message_formats(&self) -> &'static [&'static str] {
match self { #rule_message_formats_match_arms }
}
pub fn autofixable(&self) -> Option<crate::violation::AutofixKind> {
match self { #rule_autofixable_match_arms }
}
pub fn code(&self) -> &'static str {
match self { #rule_code_match_arms }
}
pub fn from_code(code: &str) -> Result<Self, FromCodeError> {
match code {
#rule_from_code_match_arms
_ => Err(FromCodeError::Unknown),
}
}
}
impl DiagnosticKind {
/// The rule of the diagnostic.
pub fn rule(&self) -> &'static Rule {
match self { #diagkind_code_match_arms }
}
/// The body text for the diagnostic.
pub fn body(&self) -> String {
match self { #diagkind_body_match_arms }
}
/// Whether the diagnostic is (potentially) fixable.
pub fn fixable(&self) -> bool {
match self { #diagkind_fixable_match_arms }
}
/// The message used to describe the fix action for a given `DiagnosticKind`.
pub fn commit(&self) -> Option<String> {
match self { #diagkind_commit_match_arms }
}
}
#from_impls_for_diagkind
#rulecodeprefix
}
}
pub struct Mapping {
entries: Vec<(Ident, Path, Ident, Vec<Attribute>)>,
}
impl Parse for Mapping {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let mut entries = Vec::new();
while !input.is_empty() {
// Grab the `#[cfg(...)]` attributes.
let attrs = input.call(Attribute::parse_outer)?;
// Parse the `RuleCodePrefix::... => ...` part.
let code: Ident = input.parse()?;
let _: Token![=>] = input.parse()?;
let path: Path = input.parse()?;
let name = path.segments.last().unwrap().ident.clone();
let _: Token![,] = input.parse()?;
entries.push((code, path, name, attrs));
}
Ok(Self { entries })
}
}
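
As a sketch, an invocation of this mapping macro looks roughly like the following — the codes and module paths are placeholders, not entries from this commit. Each entry consists of optional `#[cfg(...)]` attributes, a code identifier, `=>`, a path to the violation struct, and a trailing comma; the last path segment becomes the variant name:

```rust
// Placeholder codes and paths; the real entries live in the linter crate.
define_rule_mapping!(
    E501 => rules::pycodestyle::rules::LineTooLong,
    #[cfg(feature = "extra-rules")]
    X001 => rules::example::rules::SomethingSuspicious,
);
```

This expands to `Rule::LineTooLong` and `DiagnosticKind::LineTooLong(LineTooLong)` (and likewise for the second entry, gated behind its `cfg`), plus `Rule::code()`, `Rule::from_code("E501")`, and a `From<LineTooLong> for DiagnosticKind` impl.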


@@ -0,0 +1,55 @@
use proc_macro2::TokenStream;
use quote::{quote, quote_spanned, ToTokens};
use syn::spanned::Spanned;
use syn::{Block, Expr, ItemFn, Stmt};
pub fn derive_message_formats(func: &ItemFn) -> proc_macro2::TokenStream {
let mut strings = quote!();
if let Err(err) = parse_block(&func.block, &mut strings) {
return err;
}
quote! {
#func
fn message_formats() -> &'static [&'static str] {
&[#strings]
}
}
}
fn parse_block(block: &Block, strings: &mut TokenStream) -> Result<(), TokenStream> {
let Some(Stmt::Expr(last)) = block.stmts.last() else {panic!("expected last statement in block to be an expression")};
parse_expr(last, strings)?;
Ok(())
}
fn parse_expr(expr: &Expr, strings: &mut TokenStream) -> Result<(), TokenStream> {
match expr {
Expr::Macro(mac) if mac.mac.path.is_ident("format") => {
let Some(first_token) = mac.mac.tokens.to_token_stream().into_iter().next() else {
return Err(quote_spanned!(expr.span() => compile_error!("expected format! to have an argument")))
};
strings.extend(quote! {#first_token,});
Ok(())
}
Expr::Block(block) => parse_block(&block.block, strings),
Expr::If(expr) => {
parse_block(&expr.then_branch, strings)?;
if let Some((_, then)) = &expr.else_branch {
parse_expr(then, strings)?;
}
Ok(())
}
Expr::Match(block) => {
for arm in &block.arms {
parse_expr(&arm.body, strings)?;
}
Ok(())
}
_ => Err(quote_spanned!(
expr.span() =>
compile_error!("expected last expression to be a format! macro or a match block")
)),
}
}
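
A sketch of the kind of function this attribute is applied to — the `Violation` trait and the `LineTooLong` struct are assumed from the surrounding crates, not shown in this diff. The function body must end in a `format!` call, or in a block, `if`, or `match` whose branches all end in `format!`:

```rust
// Assumed consumer; `Violation` and `LineTooLong` are not part of this diff.
impl Violation for LineTooLong {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Line too long ({} > {})", self.length, self.limit)
    }
}
```

The attribute leaves `message` untouched and additionally emits `fn message_formats() -> &'static [&'static str]`, here returning `&["Line too long ({} > {})"]`.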


@@ -0,0 +1,40 @@
//! This crate implements internal macros for the `ruff` library.
use proc_macro::TokenStream;
use syn::{parse_macro_input, DeriveInput, ItemFn};
mod config;
mod define_rule_mapping;
mod derive_message_formats;
mod rule_code_prefix;
mod rule_namespace;
#[proc_macro_derive(ConfigurationOptions, attributes(option, doc, option_group))]
pub fn derive_config(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
config::derive_impl(input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
#[proc_macro]
pub fn define_rule_mapping(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mapping = parse_macro_input!(item as define_rule_mapping::Mapping);
define_rule_mapping::define_rule_mapping(&mapping).into()
}
#[proc_macro_derive(RuleNamespace, attributes(prefix))]
pub fn derive_rule_namespace(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
rule_namespace::derive_impl(input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
#[proc_macro_attribute]
pub fn derive_message_formats(_attr: TokenStream, item: TokenStream) -> TokenStream {
let func = parse_macro_input!(item as ItemFn);
derive_message_formats::derive_message_formats(&func).into()
}
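
Each entry point funnels `syn::Error` into `compile_error!` tokens so that misuse of a macro surfaces as an ordinary compiler diagnostic at the offending span. A minimal standalone sketch of that pattern (the function names are invented):

```rust
use proc_macro2::TokenStream;
use syn::DeriveInput;

// Stand-in for one of the fallible `derive_impl` functions above.
fn fallible_expand(input: &DeriveInput) -> syn::Result<TokenStream> {
    Err(syn::Error::new(input.ident.span(), "example error"))
}

// Mirrors the `unwrap_or_else(syn::Error::into_compile_error)` call sites:
// on failure the returned tokens are `compile_error!("example error")`.
fn expand(input: &DeriveInput) -> TokenStream {
    fallible_expand(input).unwrap_or_else(syn::Error::into_compile_error)
}
```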


@@ -0,0 +1,156 @@
use std::collections::BTreeMap;
use proc_macro2::Span;
use quote::quote;
use syn::{Attribute, Ident};
pub fn expand<'a>(
rule_type: &Ident,
prefix_ident: &Ident,
variants: impl Iterator<Item = &'a Ident>,
variant_name: impl Fn(&str) -> &'a Ident,
attr: impl Iterator<Item = &'a Vec<Attribute>>,
) -> proc_macro2::TokenStream {
// Build up a map from prefix to matching RuleCodes.
let mut prefix_to_codes: BTreeMap<String, BTreeMap<String, Vec<Attribute>>> =
BTreeMap::default();
let mut pl_codes = BTreeMap::new();
for (variant, attr) in variants.zip(attr) {
let code_str = variant.to_string();
let code_prefix_len = code_str
.chars()
.take_while(|char| char.is_alphabetic())
.count();
let code_suffix_len = code_str.len() - code_prefix_len;
for i in 0..=code_suffix_len {
let prefix = code_str[..code_prefix_len + i].to_string();
prefix_to_codes
.entry(prefix)
.or_default()
.entry(code_str.clone())
.or_insert_with(|| attr.clone());
}
if code_str.starts_with("PL") {
pl_codes.insert(code_str, attr.clone());
}
}
prefix_to_codes.insert("PL".to_string(), pl_codes);
let prefix_variants = prefix_to_codes.iter().map(|(prefix, codes)| {
let prefix = Ident::new(prefix, Span::call_site());
let attr = if_all_same(codes.values().cloned()).unwrap_or_default();
quote! {
#(#attr)*
#prefix
}
});
let prefix_impl = generate_impls(rule_type, prefix_ident, &prefix_to_codes, variant_name);
quote! {
#[derive(
::strum_macros::EnumIter,
::strum_macros::EnumString,
::strum_macros::AsRefStr,
::strum_macros::IntoStaticStr,
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Clone,
Hash,
::serde::Serialize,
::serde::Deserialize,
)]
pub enum #prefix_ident {
#(#prefix_variants,)*
}
#prefix_impl
}
}
fn generate_impls<'a>(
rule_type: &Ident,
prefix_ident: &Ident,
prefix_to_codes: &BTreeMap<String, BTreeMap<String, Vec<Attribute>>>,
variant_name: impl Fn(&str) -> &'a Ident,
) -> proc_macro2::TokenStream {
let into_iter_match_arms = prefix_to_codes.iter().map(|(prefix_str, codes)| {
let prefix = Ident::new(prefix_str, Span::call_site());
let attr = if_all_same(codes.values().cloned()).unwrap_or_default();
let codes = codes.iter().map(|(code, attr)| {
let rule_variant = variant_name(code);
quote! {
#(#attr)*
#rule_type::#rule_variant
}
});
quote! {
#(#attr)*
#prefix_ident::#prefix => vec![#(#codes),*].into_iter(),
}
});
let specificity_match_arms = prefix_to_codes.iter().map(|(prefix_str, codes)| {
let prefix = Ident::new(prefix_str, Span::call_site());
let mut num_numeric = prefix_str.chars().filter(|char| char.is_numeric()).count();
if prefix_str != "PL" && prefix_str.starts_with("PL") {
num_numeric += 1;
}
let suffix_len = match num_numeric {
0 => quote! { Specificity::Linter },
1 => quote! { Specificity::Code1Char },
2 => quote! { Specificity::Code2Chars },
3 => quote! { Specificity::Code3Chars },
4 => quote! { Specificity::Code4Chars },
5 => quote! { Specificity::Code5Chars },
_ => panic!("Invalid prefix: {prefix}"),
};
let attr = if_all_same(codes.values().cloned()).unwrap_or_default();
quote! {
#(#attr)*
#prefix_ident::#prefix => #suffix_len,
}
});
quote! {
impl #prefix_ident {
pub(crate) fn specificity(&self) -> crate::rule_selector::Specificity {
use crate::rule_selector::Specificity;
#[allow(clippy::match_same_arms)]
match self {
#(#specificity_match_arms)*
}
}
}
impl IntoIterator for &#prefix_ident {
type Item = #rule_type;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
#[allow(clippy::match_same_arms)]
match self {
#(#into_iter_match_arms)*
}
}
}
}
}
/// If all values in an iterator are the same, return that value. Otherwise, return `None`.
fn if_all_same<T: PartialEq>(iter: impl Iterator<Item = T>) -> Option<T> {
let mut iter = iter.peekable();
let first = iter.next()?;
if iter.all(|x| x == first) {
Some(first)
} else {
None
}
}
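
The expansion above registers every rule code under each of its prefixes: the leading letters, then each successively longer digit run. A runnable sketch of that computation, using a made-up code:

```rust
// Standalone illustration of the prefix expansion performed by `expand` above.
fn prefixes(code: &str) -> Vec<String> {
    let prefix_len = code.chars().take_while(|c| c.is_alphabetic()).count();
    (0..=code.len() - prefix_len)
        .map(|i| code[..prefix_len + i].to_string())
        .collect()
}

fn main() {
    // "E501" is reachable via the prefixes E, E5, E50 and E501, so selecting
    // the prefix "E5" selects every rule whose code starts with "E5".
    assert_eq!(prefixes("E501"), ["E", "E5", "E50", "E501"]);
}
```

`PL` is additionally inserted as an umbrella prefix covering all `PL...` codes, mirroring the special case in the macro (those codes otherwise only produce prefixes of at least three letters, such as `PLC`).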


@@ -0,0 +1,174 @@
use std::cmp::Reverse;
use std::collections::HashSet;
use proc_macro2::{Ident, Span};
use quote::quote;
use syn::spanned::Spanned;
use syn::{Attribute, Data, DataEnum, DeriveInput, Error, Lit, Meta, MetaNameValue};
pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
let DeriveInput { ident, data: Data::Enum(DataEnum {
variants, ..
}), .. } = input else {
return Err(Error::new(input.ident.span(), "can only derive RuleNamespace from enums"));
};
let mut parsed = Vec::new();
let mut common_prefix_match_arms = quote!();
let mut name_match_arms = quote!(Self::Ruff => "Ruff-specific rules",);
let mut url_match_arms = quote!(Self::Ruff => None,);
let mut into_iter_match_arms = quote!();
let mut all_prefixes = HashSet::new();
for variant in variants {
let mut first_chars = HashSet::new();
let prefixes: Result<Vec<_>, _> = variant
.attrs
.iter()
.filter(|a| a.path.is_ident("prefix"))
.map(|attr| {
let Ok(Meta::NameValue(MetaNameValue{lit: Lit::Str(lit), ..})) = attr.parse_meta() else {
return Err(Error::new(attr.span(), r#"expected attribute to be in the form of [#prefix = "..."]"#));
};
let str = lit.value();
match str.chars().next() {
None => return Err(Error::new(lit.span(), "expected prefix string to be non-empty")),
Some(c) => if !first_chars.insert(c) {
return Err(Error::new(lit.span(), format!("this variant already has another prefix starting with the character '{c}'")))
}
}
if !all_prefixes.insert(str.clone()) {
return Err(Error::new(lit.span(), "prefix has already been defined before"));
}
Ok(str)
})
.collect();
let prefixes = prefixes?;
if prefixes.is_empty() {
return Err(Error::new(
variant.span(),
r#"Missing #[prefix = "..."] attribute"#,
));
}
let Some(doc_attr) = variant.attrs.iter().find(|a| a.path.is_ident("doc")) else {
return Err(Error::new(variant.span(), r#"expected a doc comment"#))
};
let variant_ident = variant.ident;
if variant_ident != "Ruff" {
let (name, url) = parse_doc_attr(doc_attr)?;
name_match_arms.extend(quote! {Self::#variant_ident => #name,});
url_match_arms.extend(quote! {Self::#variant_ident => Some(#url),});
}
for lit in &prefixes {
parsed.push((
lit.clone(),
variant_ident.clone(),
match prefixes.len() {
1 => ParseStrategy::SinglePrefix,
_ => ParseStrategy::MultiplePrefixes,
},
));
}
if let [prefix] = &prefixes[..] {
common_prefix_match_arms.extend(quote! { Self::#variant_ident => #prefix, });
let prefix_ident = Ident::new(prefix, Span::call_site());
into_iter_match_arms.extend(quote! {
#ident::#variant_ident => RuleCodePrefix::#prefix_ident.into_iter(),
});
} else {
// There is more than one prefix. We asserted above that prefixes of the
// same variant don't start with the same character, so the common prefix
// for this variant is the empty string.
common_prefix_match_arms.extend(quote! { Self::#variant_ident => "", });
}
}
parsed.sort_by_key(|(prefix, ..)| Reverse(prefix.len()));
let mut if_statements = quote!();
for (prefix, field, strategy) in parsed {
let ret_str = match strategy {
ParseStrategy::SinglePrefix => quote!(rest),
ParseStrategy::MultiplePrefixes => quote!(code),
};
if_statements.extend(quote! {if let Some(rest) = code.strip_prefix(#prefix) {
return Some((#ident::#field, #ret_str));
}});
}
into_iter_match_arms.extend(quote! {
#ident::Pycodestyle => {
let rules: Vec<_> = (&RuleCodePrefix::E).into_iter().chain(&RuleCodePrefix::W).collect();
rules.into_iter()
}
});
Ok(quote! {
impl crate::registry::RuleNamespace for #ident {
fn parse_code(code: &str) -> Option<(Self, &str)> {
#if_statements
None
}
fn common_prefix(&self) -> &'static str {
match self { #common_prefix_match_arms }
}
fn name(&self) -> &'static str {
match self { #name_match_arms }
}
fn url(&self) -> Option<&'static str> {
match self { #url_match_arms }
}
}
impl IntoIterator for &#ident {
type Item = Rule;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
use colored::Colorize;
match self {
#into_iter_match_arms
}
}
}
})
}
/// Parses an attribute in the form of `#[doc = " [name](https://example.com/)"]`
/// into a tuple of link label and URL.
fn parse_doc_attr(doc_attr: &Attribute) -> syn::Result<(String, String)> {
let Ok(Meta::NameValue(MetaNameValue{lit: Lit::Str(doc_lit), ..})) = doc_attr.parse_meta() else {
return Err(Error::new(doc_attr.span(), r#"expected doc attribute to be in the form of #[doc = "..."]"#))
};
parse_markdown_link(doc_lit.value().trim())
.map(|(name, url)| (name.to_string(), url.to_string()))
.ok_or_else(|| {
Error::new(
doc_lit.span(),
r#"expected doc comment to be in the form of `/// [name](https://example.com/)`"#,
)
})
}
fn parse_markdown_link(link: &str) -> Option<(&str, &str)> {
link.strip_prefix('[')?.strip_suffix(')')?.split_once("](")
}
enum ParseStrategy {
SinglePrefix,
MultiplePrefixes,
}
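
For reference, a sketch of the enum shape this derive expects — the variant names, prefixes, and links below are illustrative, though `Pycodestyle` (with the `E`/`W` prefixes) and `Ruff` are genuinely special-cased above. Every variant needs at least one `#[prefix = "..."]` attribute and a doc comment, which must be a `[name](url)` link for all variants except `Ruff`:

```rust
// Illustrative input for the derive above.
#[derive(RuleNamespace)]
pub enum Linter {
    /// [pycodestyle](https://pypi.org/project/pycodestyle/)
    #[prefix = "E"]
    #[prefix = "W"]
    Pycodestyle,
    /// [Pyflakes](https://pypi.org/project/pyflakes/)
    #[prefix = "F"]
    Pyflakes,
    /// Ruff-specific rules
    #[prefix = "RUF"]
    Ruff,
}
```

With this input, `Linter::parse_code("F401")` returns `Some((Linter::Pyflakes, "401"))`; for multi-prefix variants such as `Pycodestyle`, the full code is returned instead of only the suffix. `common_prefix()` yields `""` for `Pycodestyle` (two prefixes) and `"F"`/`"RUF"` for the single-prefix variants.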