Create a separate dev crate for development scripts (#607)

Charlie Marsh 2022-11-05 15:59:18 -04:00 committed by GitHub
parent 2e1799dd80
commit 6741ea9790
10 changed files with 110 additions and 45 deletions

Cargo.lock (generated)

@@ -2230,7 +2230,6 @@ dependencies = [
  "chrono",
  "clap 4.0.15",
  "clearscreen",
- "codegen",
  "colored",
  "common-path",
  "criterion",
@@ -2265,6 +2264,22 @@ dependencies = [
  "walkdir",
 ]
 
+[[package]]
+name = "ruff_dev"
+version = "0.0.101"
+dependencies = [
+ "anyhow",
+ "clap 4.0.15",
+ "codegen",
+ "itertools",
+ "ruff",
+ "rustpython-ast",
+ "rustpython-common",
+ "rustpython-parser",
+ "strum",
+ "strum_macros",
+]
+
 [[package]]
 name = "rustls"
 version = "0.20.6"

Cargo.toml

@@ -1,6 +1,7 @@
 [workspace]
 members = [
     "flake8_to_ruff",
+    "ruff_dev",
 ]
 
 [package]
@@ -56,7 +57,6 @@ getrandom = { version = "0.2.7", features = ["js"] }
 
 [dev-dependencies]
 assert_cmd = { version = "2.0.4" }
-codegen = { version = "0.2.0" }
 criterion = { version = "0.4.0" }
 insta = { version = "1.19.1", features = ["yaml"] }
 test-case = { version = "2.2.2" }

ruff_dev/Cargo.toml (new file)

@@ -0,0 +1,16 @@
+[package]
+name = "ruff_dev"
+version = "0.0.101"
+edition = "2021"
+
+[dependencies]
+anyhow = { version = "1.0.60" }
+clap = { version = "4.0.1", features = ["derive"] }
+codegen = { version = "0.2.0" }
+itertools = { version = "0.10.5" }
+ruff = { path = ".." }
+rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "77b821a1941019fe34f73ce17cea013ae1b98fd0" }
+rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "77b821a1941019fe34f73ce17cea013ae1b98fd0" }
+rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "77b821a1941019fe34f73ce17cea013ae1b98fd0" }
+strum = { version = "0.24.1", features = ["strum_macros"] }
+strum_macros = { version = "0.24.3" }

ruff_dev/src/generate_check_code_prefix.rs

@@ -1,3 +1,5 @@
+//! Generate the `CheckCodePrefix` enum.
+
 use std::collections::{BTreeMap, BTreeSet};
 use std::fs::OpenOptions;
 use std::io::Write;
@@ -13,17 +15,14 @@ const FILE: &str = "src/checks_gen.rs";
 
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
-/// Generate the `CheckCodePrefix` enum.
-struct Cli {
+pub struct Cli {
     /// Write the generated source code to stdout (rather than to
     /// `src/checks_gen.rs`).
     #[arg(long)]
     dry_run: bool,
 }
 
-fn main() -> Result<()> {
-    let cli = Cli::parse();
-
+pub fn main(cli: &Cli) -> Result<()> {
     // Build up a map from prefix to matching CheckCodes.
     let mut prefix_to_codes: BTreeMap<String, BTreeSet<CheckCode>> = Default::default();
     for check_code in CheckCode::iter() {
@@ -34,9 +33,7 @@ fn main() -> Result<()> {
             .count();
         for i in prefix_len..=as_ref.len() {
             let prefix = as_ref[..i].to_string();
-            let entry = prefix_to_codes
-                .entry(prefix)
-                .or_insert_with(|| Default::default());
+            let entry = prefix_to_codes.entry(prefix).or_default();
             entry.insert(check_code.clone());
         }
     }
@@ -56,7 +53,7 @@ fn main() -> Result<()> {
         .derive("Clone")
         .derive("Serialize")
         .derive("Deserialize");
-    for (prefix, _) in &prefix_to_codes {
+    for prefix in prefix_to_codes.keys() {
         gen = gen.push_variant(Variant::new(prefix.to_string()));
     }
 
@@ -100,7 +97,7 @@ fn main() -> Result<()> {
         .ret(Type::new("PrefixSpecificity"))
         .vis("pub")
         .line("match self {");
-    for (prefix, _) in &prefix_to_codes {
+    for prefix in prefix_to_codes.keys() {
         let num_numeric = prefix.chars().filter(|char| char.is_numeric()).count();
         let specificity = match num_numeric {
             3 => "Explicit",
@@ -129,7 +126,7 @@ fn main() -> Result<()> {
     output.push_str("use crate::checks::CheckCode;");
     output.push('\n');
     output.push('\n');
-    output.push_str(&format!("{}", scope.to_string()));
+    output.push_str(&scope.to_string());
 
     // Write the output to `src/checks_gen.rs` (or stdout).
     if cli.dry_run {

ruff_dev/src/generate_rules_table.rs

@@ -1,28 +1,26 @@
+//! Generate a Markdown-compatible table of supported lint rules.
+
 use std::fs;
 use std::fs::OpenOptions;
 use std::io::Write;
 
 use anyhow::Result;
-use clap::Parser;
+use clap::Args;
 use ruff::checks::{CheckCategory, CheckCode};
 use strum::IntoEnumIterator;
 
-const FILE: &str = "README.md";
+const FILE: &str = "../README.md";
 const BEGIN_PRAGMA: &str = "<!-- Begin auto-generated sections. -->";
 const END_PRAGMA: &str = "<!-- End auto-generated sections. -->";
 
-#[derive(Parser)]
-#[command(author, version, about, long_about = None)]
-/// Generate a Markdown-compatible table of supported lint rules.
-struct Cli {
+#[derive(Args)]
+pub struct Cli {
     /// Write the generated table to stdout (rather than to `README.md`).
     #[arg(long)]
     dry_run: bool,
 }
 
-fn main() -> Result<()> {
-    let cli = Cli::parse();
-
+pub fn main(cli: &Cli) -> Result<()> {
     // Generate the table string.
     let mut output = String::new();
     for check_category in CheckCategory::iter() {
@@ -43,7 +41,7 @@ fn main() -> Result<()> {
                 "| {} | {} | {} | {} |",
                 check_kind.code().as_ref(),
                 check_kind.as_ref(),
-                check_kind.summary().replace("|", r"\|"),
+                check_kind.summary().replace('|', r"\|"),
                 fix_token
             ));
             output.push('\n');

ruff_dev/src/generate_source_code.rs

@@ -1,25 +1,25 @@
+//! Run round-trip source code generation on a given Python file.
+
 use std::path::PathBuf;
 
 use anyhow::Result;
-use clap::Parser;
+use clap::Args;
 use ruff::code_gen::SourceGenerator;
 use ruff::fs;
 use rustpython_parser::parser;
 
-#[derive(Debug, Parser)]
-struct Cli {
+#[derive(Args)]
+pub struct Cli {
+    /// Python file to round-trip.
     #[arg(required = true)]
     file: PathBuf,
 }
 
-fn main() -> Result<()> {
-    let cli = Cli::parse();
-
+pub fn main(cli: &Cli) -> Result<()> {
     let contents = fs::read_file(&cli.file)?;
     let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
     let mut generator = SourceGenerator::new();
     generator.unparse_suite(&python_ast)?;
     println!("{}", generator.generate()?);
     Ok(())
 }

ruff_dev/src/lib.rs (new file)

@@ -0,0 +1,5 @@
+pub mod generate_check_code_prefix;
+pub mod generate_rules_table;
+pub mod generate_source_code;
+pub mod print_ast;
+pub mod print_tokens;

ruff_dev/src/main.rs (new file)

@@ -0,0 +1,39 @@
+use anyhow::Result;
+use clap::{Parser, Subcommand};
+use ruff_dev::{
+    generate_check_code_prefix, generate_rules_table, generate_source_code, print_ast, print_tokens,
+};
+
+#[derive(Parser)]
+#[command(author, version, about, long_about = None)]
+#[command(propagate_version = true)]
+struct Cli {
+    #[command(subcommand)]
+    command: Commands,
+}
+
+#[derive(Subcommand)]
+enum Commands {
+    /// Generate the `CheckCodePrefix` enum.
+    GenerateCheckCodePrefix(generate_check_code_prefix::Cli),
+    /// Generate a Markdown-compatible table of supported lint rules.
+    GenerateRulesTable(generate_rules_table::Cli),
+    /// Run round-trip source code generation on a given Python file.
+    GenerateSourceCode(generate_source_code::Cli),
+    /// Print the AST for a given Python file.
+    PrintAST(print_ast::Cli),
+    /// Print the token stream for a given Python file.
+    PrintTokens(print_tokens::Cli),
+}
+
+fn main() -> Result<()> {
+    let cli = Cli::parse();
+    match &cli.command {
+        Commands::GenerateCheckCodePrefix(args) => generate_check_code_prefix::main(args)?,
+        Commands::GenerateRulesTable(args) => generate_rules_table::main(args)?,
+        Commands::GenerateSourceCode(args) => generate_source_code::main(args)?,
+        Commands::PrintAST(args) => print_ast::main(args)?,
+        Commands::PrintTokens(args) => print_tokens::main(args)?,
+    }
+    Ok(())
+}

ruff_dev/src/print_ast.rs

@@ -3,23 +3,20 @@
 use std::path::PathBuf;
 
 use anyhow::Result;
-use clap::Parser;
+use clap::Args;
 use ruff::fs;
 use rustpython_parser::parser;
 
-#[derive(Debug, Parser)]
-struct Cli {
+#[derive(Args)]
+pub struct Cli {
+    /// Python file for which to generate the AST.
     #[arg(required = true)]
     file: PathBuf,
 }
 
-fn main() -> Result<()> {
-    let cli = Cli::parse();
-
+pub fn main(cli: &Cli) -> Result<()> {
     let contents = fs::read_file(&cli.file)?;
     let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
     println!("{:#?}", python_ast);
     Ok(())
 }

ruff_dev/src/print_tokens.rs

@@ -3,23 +3,21 @@
 use std::path::PathBuf;
 
 use anyhow::Result;
-use clap::Parser;
+use clap::Args;
 use ruff::fs;
 use rustpython_parser::lexer;
 
-#[derive(Debug, Parser)]
-struct Cli {
+#[derive(Args)]
+pub struct Cli {
+    /// Python file for which to generate the AST.
     #[arg(required = true)]
     file: PathBuf,
 }
 
-fn main() -> Result<()> {
-    let cli = Cli::parse();
-
+pub fn main(cli: &Cli) -> Result<()> {
     let contents = fs::read_file(&cli.file)?;
     for (_, tok, _) in lexer::make_tokenizer(&contents).flatten() {
         println!("{:#?}", tok);
     }
     Ok(())
 }
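
For readers less familiar with this clap layout, here is a minimal, self-contained sketch of the pattern the new crate adopts. It is illustrative only and not part of the commit: the `PrintAstArgs` name and the body of `main` are invented, and it assumes only clap 4 with the `derive` feature. Each dev-script module exposes an argument struct deriving `Args` plus an entry point that takes a reference to it, and the single `ruff_dev` binary composes those structs behind a `Parser`/`Subcommand` pair.

use std::path::PathBuf;

use clap::{Args, Parser, Subcommand};

// Per-subcommand argument struct, mirroring the `pub struct Cli` shape of the
// ruff_dev modules above. The name is hypothetical.
#[derive(Args)]
struct PrintAstArgs {
    /// Python file for which to print the AST.
    #[arg(required = true)]
    file: PathBuf,
}

#[derive(Subcommand)]
enum Command {
    /// Wraps the per-module argument struct, as ruff_dev/src/main.rs does.
    PrintAst(PrintAstArgs),
}

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Command,
}

fn main() {
    let cli = Cli::parse();
    match &cli.command {
        // In ruff_dev this arm would delegate to print_ast::main(args).
        Command::PrintAst(args) => println!("would print the AST of {}", args.file.display()),
    }
}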