Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-28 21:05:08 +00:00
refactor: Introduce crates folder (#2088)
This PR introduces a new `crates` directory and moves all "product" crates into that folder. Part of #2059.
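For orientation, here is a partial sketch of the resulting layout, reconstructed only from the paths that appear in this excerpt of the diff (the PR moves other crates as well, which are not listed here):

    crates/
      ruff_cli/
        src/
          args.rs
          cache.rs
          commands.rs
          commands/
            linter.rs
          diagnostics.rs
          iterators.rs
          lib.rs
          main.rs
          printer.rs
          resolve.rs    (declared via `mod resolve;` in main.rs; not shown in this excerpt)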
This commit is contained in:
parent e3dfa2e04e
commit cd8be8c0be

1785 changed files with 314 additions and 298 deletions
503  crates/ruff_cli/src/args.rs  Normal file
@@ -0,0 +1,503 @@
#![allow(dead_code)]

use std::path::PathBuf;

use clap::{command, Parser};
use regex::Regex;
use ruff::logging::LogLevel;
use ruff::registry::Rule;
use ruff::resolver::ConfigProcessor;
use ruff::settings::configuration::RuleSelection;
use ruff::settings::types::{
    FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
use ruff::RuleSelector;
use rustc_hash::FxHashMap;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "ruff",
    about = "Ruff: An extremely fast Python linter.",
    after_help = "For help with a specific command, see: `ruff help <command>`."
)]
#[command(version)]
pub struct Args {
    #[command(subcommand)]
    pub command: Command,
    #[clap(flatten)]
    pub log_level_args: LogLevelArgs,
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Run Ruff on the given files or directories (default).
    Check(CheckArgs),
    /// Explain a rule.
    #[clap(alias = "--explain")]
    Rule {
        #[arg(value_parser=Rule::from_code)]
        rule: Rule,

        /// Output format
        #[arg(long, value_enum, default_value = "text")]
        format: HelpFormat,
    },
    /// List all supported upstream linters
    Linter {
        /// Output format
        #[arg(long, value_enum, default_value = "text")]
        format: HelpFormat,
    },
    /// Clear any caches in the current directory and any subdirectories.
    #[clap(alias = "--clean")]
    Clean,
    /// Generate shell completion.
    #[clap(alias = "--generate-shell-completion", hide = true)]
    GenerateShellCompletion { shell: clap_complete_command::Shell },
}

#[derive(Debug, clap::Args)]
#[allow(clippy::struct_excessive_bools, clippy::module_name_repetitions)]
pub struct CheckArgs {
    /// List of files or directories to check.
    pub files: Vec<PathBuf>,
    /// Attempt to automatically fix lint violations.
    #[arg(long, overrides_with("no_fix"))]
    fix: bool,
    #[clap(long, overrides_with("fix"), hide = true)]
    no_fix: bool,
    /// Show violations with source code.
    #[arg(long, overrides_with("no_show_source"))]
    show_source: bool,
    #[clap(long, overrides_with("show_source"), hide = true)]
    no_show_source: bool,
    /// Avoid writing any fixed files back; instead, output a diff for each
    /// changed file to stdout.
    #[arg(long)]
    pub diff: bool,
    /// Run in watch mode by re-running whenever files change.
    #[arg(short, long)]
    pub watch: bool,
    /// Fix any fixable lint violations, but don't report on leftover
    /// violations. Implies `--fix`.
    #[arg(long, overrides_with("no_fix_only"))]
    fix_only: bool,
    #[clap(long, overrides_with("fix_only"), hide = true)]
    no_fix_only: bool,
    /// Output serialization format for violations.
    #[arg(long, value_enum, env = "RUFF_FORMAT")]
    pub format: Option<SerializationFormat>,
    /// Path to the `pyproject.toml` or `ruff.toml` file to use for
    /// configuration.
    #[arg(long, conflicts_with = "isolated")]
    pub config: Option<PathBuf>,
    /// Comma-separated list of rule codes to enable (or ALL, to enable all
    /// rules).
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection"
    )]
    pub select: Option<Vec<RuleSelector>>,
    /// Comma-separated list of rule codes to disable.
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection"
    )]
    pub ignore: Option<Vec<RuleSelector>>,
    /// Like --select, but adds additional rule codes on top of the selected
    /// ones.
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection"
    )]
    pub extend_select: Option<Vec<RuleSelector>>,
    /// Like --ignore. (Deprecated: You can just use --ignore instead.)
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection",
        hide = true
    )]
    pub extend_ignore: Option<Vec<RuleSelector>>,
    /// List of mappings from file pattern to code to exclude
    #[arg(long, value_delimiter = ',', help_heading = "Rule selection")]
    pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
    /// List of paths, used to omit files and/or directories from analysis.
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "FILE_PATTERN",
        help_heading = "File selection"
    )]
    pub exclude: Option<Vec<FilePattern>>,
    /// Like --exclude, but adds additional files and directories on top of
    /// those already excluded.
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "FILE_PATTERN",
        help_heading = "File selection"
    )]
    pub extend_exclude: Option<Vec<FilePattern>>,
    /// List of rule codes to treat as eligible for autofix. Only applicable
    /// when autofix itself is enabled (e.g., via `--fix`).
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection"
    )]
    pub fixable: Option<Vec<RuleSelector>>,
    /// List of rule codes to treat as ineligible for autofix. Only applicable
    /// when autofix itself is enabled (e.g., via `--fix`).
    #[arg(
        long,
        value_delimiter = ',',
        value_name = "RULE_CODE",
        help_heading = "Rule selection"
    )]
    pub unfixable: Option<Vec<RuleSelector>>,
    /// Respect file exclusions via `.gitignore` and other standard ignore
    /// files.
    #[arg(
        long,
        overrides_with("no_respect_gitignore"),
        help_heading = "File selection"
    )]
    respect_gitignore: bool,
    #[clap(long, overrides_with("respect_gitignore"), hide = true)]
    no_respect_gitignore: bool,
    /// Enforce exclusions, even for paths passed to Ruff directly on the
    /// command-line.
    #[arg(
        long,
        overrides_with("no_force_exclude"),
        help_heading = "File selection"
    )]
    force_exclude: bool,
    #[clap(long, overrides_with("force_exclude"), hide = true)]
    no_force_exclude: bool,
    /// The minimum Python version that should be supported.
    #[arg(long, help_heading = "Rule configuration")]
    pub target_version: Option<PythonVersion>,
    /// Set the line-length for length-associated rules and automatic
    /// formatting.
    #[arg(long, help_heading = "Rule configuration")]
    pub line_length: Option<usize>,
    /// Regular expression matching the name of dummy variables.
    #[arg(long, help_heading = "Rule configuration")]
    pub dummy_variable_rgx: Option<Regex>,
    /// Disable cache reads.
    #[arg(short, long, help_heading = "Miscellaneous")]
    pub no_cache: bool,
    /// Ignore all configuration files.
    #[arg(long, conflicts_with = "config", help_heading = "Miscellaneous")]
    pub isolated: bool,
    /// Path to the cache directory.
    #[arg(long, env = "RUFF_CACHE_DIR", help_heading = "Miscellaneous")]
    pub cache_dir: Option<PathBuf>,
    /// The name of the file when passing it through stdin.
    #[arg(long, help_heading = "Miscellaneous")]
    pub stdin_filename: Option<PathBuf>,
    /// Exit with status code "0", even upon detecting lint violations.
    #[arg(short, long, help_heading = "Miscellaneous")]
    pub exit_zero: bool,
    /// Does nothing and will be removed in the future.
    #[arg(
        long,
        overrides_with("no_update_check"),
        help_heading = "Miscellaneous",
        hide = true
    )]
    update_check: bool,
    #[clap(long, overrides_with("update_check"), hide = true)]
    no_update_check: bool,
    /// Show counts for every rule with at least one violation.
    #[arg(
        long,
        // Unsupported default-command arguments.
        conflicts_with = "diff",
        conflicts_with = "show_source",
        conflicts_with = "watch",
    )]
    pub statistics: bool,
    /// Enable automatic additions of `noqa` directives to failing lines.
    #[arg(
        long,
        // conflicts_with = "add_noqa",
        conflicts_with = "show_files",
        conflicts_with = "show_settings",
        // Unsupported default-command arguments.
        conflicts_with = "statistics",
        conflicts_with = "stdin_filename",
        conflicts_with = "watch",
        conflicts_with = "fix",
    )]
    pub add_noqa: bool,
    /// See the files Ruff will be run against with the current settings.
    #[arg(
        long,
        // Fake subcommands.
        conflicts_with = "add_noqa",
        // conflicts_with = "show_files",
        conflicts_with = "show_settings",
        // Unsupported default-command arguments.
        conflicts_with = "statistics",
        conflicts_with = "stdin_filename",
        conflicts_with = "watch",
    )]
    pub show_files: bool,
    /// See the settings Ruff will use to lint a given Python file.
    #[arg(
        long,
        // Fake subcommands.
        conflicts_with = "add_noqa",
        conflicts_with = "show_files",
        // conflicts_with = "show_settings",
        // Unsupported default-command arguments.
        conflicts_with = "statistics",
        conflicts_with = "stdin_filename",
        conflicts_with = "watch",
    )]
    pub show_settings: bool,
}

#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum HelpFormat {
    Text,
    Json,
}

#[allow(clippy::module_name_repetitions)]
#[derive(Debug, clap::Args)]
pub struct LogLevelArgs {
    /// Enable verbose logging.
    #[arg(
        short,
        long,
        global = true,
        group = "verbosity",
        help_heading = "Log levels"
    )]
    pub verbose: bool,
    /// Print lint violations, but nothing else.
    #[arg(
        short,
        long,
        global = true,
        group = "verbosity",
        help_heading = "Log levels"
    )]
    pub quiet: bool,
    /// Disable all logging (but still exit with status code "1" upon detecting
    /// lint violations).
    #[arg(
        short,
        long,
        global = true,
        group = "verbosity",
        help_heading = "Log levels"
    )]
    pub silent: bool,
}

impl From<&LogLevelArgs> for LogLevel {
    fn from(args: &LogLevelArgs) -> Self {
        if args.silent {
            Self::Silent
        } else if args.quiet {
            Self::Quiet
        } else if args.verbose {
            Self::Verbose
        } else {
            Self::Default
        }
    }
}

impl CheckArgs {
    /// Partition the CLI into command-line arguments and configuration
    /// overrides.
    pub fn partition(self) -> (Arguments, Overrides) {
        (
            Arguments {
                add_noqa: self.add_noqa,
                config: self.config,
                diff: self.diff,
                exit_zero: self.exit_zero,
                files: self.files,
                isolated: self.isolated,
                no_cache: self.no_cache,
                show_files: self.show_files,
                show_settings: self.show_settings,
                statistics: self.statistics,
                stdin_filename: self.stdin_filename,
                watch: self.watch,
            },
            Overrides {
                dummy_variable_rgx: self.dummy_variable_rgx,
                exclude: self.exclude,
                extend_exclude: self.extend_exclude,
                extend_ignore: self.extend_ignore,
                extend_select: self.extend_select,
                fixable: self.fixable,
                ignore: self.ignore,
                line_length: self.line_length,
                per_file_ignores: self.per_file_ignores,
                respect_gitignore: resolve_bool_arg(
                    self.respect_gitignore,
                    self.no_respect_gitignore,
                ),
                select: self.select,
                show_source: resolve_bool_arg(self.show_source, self.no_show_source),
                target_version: self.target_version,
                unfixable: self.unfixable,
                // TODO(charlie): Included in `pyproject.toml`, but not inherited.
                cache_dir: self.cache_dir,
                fix: resolve_bool_arg(self.fix, self.no_fix),
                fix_only: resolve_bool_arg(self.fix_only, self.no_fix_only),
                force_exclude: resolve_bool_arg(self.force_exclude, self.no_force_exclude),
                format: self.format,
                update_check: resolve_bool_arg(self.update_check, self.no_update_check),
            },
        )
    }
}

fn resolve_bool_arg(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (..) => unreachable!("Clap should make this impossible"),
    }
}

/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
#[allow(clippy::struct_excessive_bools)]
pub struct Arguments {
    pub add_noqa: bool,
    pub config: Option<PathBuf>,
    pub diff: bool,
    pub exit_zero: bool,
    pub files: Vec<PathBuf>,
    pub isolated: bool,
    pub no_cache: bool,
    pub show_files: bool,
    pub show_settings: bool,
    pub statistics: bool,
    pub stdin_filename: Option<PathBuf>,
    pub watch: bool,
}

/// CLI settings that function as configuration overrides.
#[derive(Clone)]
#[allow(clippy::struct_excessive_bools)]
pub struct Overrides {
    pub dummy_variable_rgx: Option<Regex>,
    pub exclude: Option<Vec<FilePattern>>,
    pub extend_exclude: Option<Vec<FilePattern>>,
    pub extend_ignore: Option<Vec<RuleSelector>>,
    pub extend_select: Option<Vec<RuleSelector>>,
    pub fixable: Option<Vec<RuleSelector>>,
    pub ignore: Option<Vec<RuleSelector>>,
    pub line_length: Option<usize>,
    pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
    pub respect_gitignore: Option<bool>,
    pub select: Option<Vec<RuleSelector>>,
    pub show_source: Option<bool>,
    pub target_version: Option<PythonVersion>,
    pub unfixable: Option<Vec<RuleSelector>>,
    // TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
    pub cache_dir: Option<PathBuf>,
    pub fix: Option<bool>,
    pub fix_only: Option<bool>,
    pub force_exclude: Option<bool>,
    pub format: Option<SerializationFormat>,
    pub update_check: Option<bool>,
}

impl ConfigProcessor for &Overrides {
    fn process_config(&self, config: &mut ruff::settings::configuration::Configuration) {
        if let Some(cache_dir) = &self.cache_dir {
            config.cache_dir = Some(cache_dir.clone());
        }
        if let Some(dummy_variable_rgx) = &self.dummy_variable_rgx {
            config.dummy_variable_rgx = Some(dummy_variable_rgx.clone());
        }
        if let Some(exclude) = &self.exclude {
            config.exclude = Some(exclude.clone());
        }
        if let Some(extend_exclude) = &self.extend_exclude {
            config.extend_exclude.extend(extend_exclude.clone());
        }
        if let Some(fix) = &self.fix {
            config.fix = Some(*fix);
        }
        if let Some(fix_only) = &self.fix_only {
            config.fix_only = Some(*fix_only);
        }
        config.rule_selections.push(RuleSelection {
            select: self.select.clone(),
            ignore: self
                .ignore
                .iter()
                .cloned()
                .chain(self.extend_ignore.iter().cloned().into_iter())
                .flatten()
                .collect(),
            extend_select: self.extend_select.clone().unwrap_or_default(),
            fixable: self.fixable.clone(),
            unfixable: self.unfixable.clone().unwrap_or_default(),
        });
        if let Some(format) = &self.format {
            config.format = Some(*format);
        }
        if let Some(force_exclude) = &self.force_exclude {
            config.force_exclude = Some(*force_exclude);
        }
        if let Some(line_length) = &self.line_length {
            config.line_length = Some(*line_length);
        }
        if let Some(per_file_ignores) = &self.per_file_ignores {
            config.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
        }
        if let Some(respect_gitignore) = &self.respect_gitignore {
            config.respect_gitignore = Some(*respect_gitignore);
        }
        if let Some(show_source) = &self.show_source {
            config.show_source = Some(*show_source);
        }
        if let Some(target_version) = &self.target_version {
            config.target_version = Some(*target_version);
        }
        if let Some(update_check) = &self.update_check {
            config.update_check = Some(*update_check);
        }
    }
}

/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
    let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
    for pair in pairs {
        per_file_ignores
            .entry(pair.pattern)
            .or_insert_with(Vec::new)
            .push(pair.prefix);
    }
    per_file_ignores
        .into_iter()
        .map(|(pattern, prefixes)| PerFileIgnore::new(pattern, &prefixes, None))
        .collect()
}
156  crates/ruff_cli/src/cache.rs  Normal file
@@ -0,0 +1,156 @@
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io::Write;
use std::path::Path;

use anyhow::Result;
use filetime::FileTime;
use log::error;
use path_absolutize::Absolutize;
use ruff::message::Message;
use ruff::settings::{flags, AllSettings, Settings};
use serde::{Deserialize, Serialize};

const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");

#[derive(Serialize, Deserialize)]
struct CacheMetadata {
    mtime: i64,
}

#[derive(Serialize)]
struct CheckResultRef<'a> {
    metadata: &'a CacheMetadata,
    messages: &'a [Message],
}

#[derive(Deserialize)]
struct CheckResult {
    metadata: CacheMetadata,
    messages: Vec<Message>,
}

fn content_dir() -> &'static Path {
    Path::new("content")
}

fn cache_key<P: AsRef<Path>>(
    path: P,
    package: Option<&P>,
    settings: &Settings,
    autofix: flags::Autofix,
) -> u64 {
    let mut hasher = DefaultHasher::new();
    CARGO_PKG_VERSION.hash(&mut hasher);
    path.as_ref().absolutize().unwrap().hash(&mut hasher);
    package
        .as_ref()
        .map(|path| path.as_ref().absolutize().unwrap())
        .hash(&mut hasher);
    settings.hash(&mut hasher);
    autofix.hash(&mut hasher);
    hasher.finish()
}

#[allow(dead_code)]
/// Initialize the cache at the specified `Path`.
pub fn init(path: &Path) -> Result<()> {
    // Create the cache directories.
    fs::create_dir_all(path.join(content_dir()))?;

    // Add the CACHEDIR.TAG.
    if !cachedir::is_tagged(path)? {
        cachedir::add_tag(path)?;
    }

    // Add the .gitignore.
    let gitignore_path = path.join(".gitignore");
    if !gitignore_path.exists() {
        let mut file = fs::File::create(gitignore_path)?;
        file.write_all(b"*")?;
    }

    Ok(())
}

fn write_sync(cache_dir: &Path, key: u64, value: &[u8]) -> Result<(), std::io::Error> {
    fs::write(
        cache_dir.join(content_dir()).join(format!("{key:x}")),
        value,
    )
}

fn read_sync(cache_dir: &Path, key: u64) -> Result<Vec<u8>, std::io::Error> {
    fs::read(cache_dir.join(content_dir()).join(format!("{key:x}")))
}

fn del_sync(cache_dir: &Path, key: u64) -> Result<(), std::io::Error> {
    fs::remove_file(cache_dir.join(content_dir()).join(format!("{key:x}")))
}

/// Get a value from the cache.
pub fn get<P: AsRef<Path>>(
    path: P,
    package: Option<&P>,
    metadata: &fs::Metadata,
    settings: &AllSettings,
    autofix: flags::Autofix,
) -> Option<Vec<Message>> {
    let encoded = read_sync(
        &settings.cli.cache_dir,
        cache_key(path, package, &settings.lib, autofix),
    )
    .ok()?;
    let (mtime, messages) = match bincode::deserialize::<CheckResult>(&encoded[..]) {
        Ok(CheckResult {
            metadata: CacheMetadata { mtime },
            messages,
        }) => (mtime, messages),
        Err(e) => {
            error!("Failed to deserialize encoded cache entry: {e:?}");
            return None;
        }
    };
    if FileTime::from_last_modification_time(metadata).unix_seconds() != mtime {
        return None;
    }
    Some(messages)
}

/// Set a value in the cache.
pub fn set<P: AsRef<Path>>(
    path: P,
    package: Option<&P>,
    metadata: &fs::Metadata,
    settings: &AllSettings,
    autofix: flags::Autofix,
    messages: &[Message],
) {
    let check_result = CheckResultRef {
        metadata: &CacheMetadata {
            mtime: FileTime::from_last_modification_time(metadata).unix_seconds(),
        },
        messages,
    };
    if let Err(e) = write_sync(
        &settings.cli.cache_dir,
        cache_key(path, package, &settings.lib, autofix),
        &bincode::serialize(&check_result).unwrap(),
    ) {
        error!("Failed to write to cache: {e:?}");
    }
}

/// Delete a value from the cache.
pub fn del<P: AsRef<Path>>(
    path: P,
    package: Option<&P>,
    settings: &AllSettings,
    autofix: flags::Autofix,
) {
    drop(del_sync(
        &settings.cli.cache_dir,
        cache_key(path, package, &settings.lib, autofix),
    ));
}
339  crates/ruff_cli/src/commands.rs  Normal file
@@ -0,0 +1,339 @@
use std::fs::remove_dir_all;
use std::io::Write;
use std::io::{self, BufWriter, Read};
use std::path::{Path, PathBuf};
use std::time::Instant;

use anyhow::{bail, Result};
use colored::Colorize;
use ignore::Error;
use itertools::Itertools;
use log::{debug, error};
use path_absolutize::path_dedot;
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use serde::Serialize;
use walkdir::WalkDir;

use ruff::cache::CACHE_DIR_NAME;
use ruff::linter::add_noqa_to_path;
use ruff::logging::LogLevel;
use ruff::message::{Location, Message};
use ruff::registry::{Linter, Rule, RuleNamespace};
use ruff::resolver::PyprojectDiscovery;
use ruff::settings::flags;
use ruff::{fix, fs, packaging, resolver, warn_user_once, AutofixAvailability, IOError};

use crate::args::{HelpFormat, Overrides};
use crate::cache;
use crate::diagnostics::{lint_path, lint_stdin, Diagnostics};
use crate::iterators::par_iter;

pub mod linter;

/// Run the linter over a collection of files.
pub fn run(
    files: &[PathBuf],
    pyproject_strategy: &PyprojectDiscovery,
    overrides: &Overrides,
    cache: flags::Cache,
    autofix: fix::FixMode,
) -> Result<Diagnostics> {
    // Collect all the Python files to check.
    let start = Instant::now();
    let (paths, resolver) = resolver::python_files_in_path(files, pyproject_strategy, overrides)?;
    let duration = start.elapsed();
    debug!("Identified files to lint in: {:?}", duration);

    if paths.is_empty() {
        warn_user_once!("No Python files found under the given path(s)");
        return Ok(Diagnostics::default());
    }

    // Initialize the cache.
    if matches!(cache, flags::Cache::Enabled) {
        match &pyproject_strategy {
            PyprojectDiscovery::Fixed(settings) => {
                if let Err(e) = cache::init(&settings.cli.cache_dir) {
                    error!(
                        "Failed to initialize cache at {}: {e:?}",
                        settings.cli.cache_dir.to_string_lossy()
                    );
                }
            }
            PyprojectDiscovery::Hierarchical(default) => {
                for settings in std::iter::once(default).chain(resolver.iter()) {
                    if let Err(e) = cache::init(&settings.cli.cache_dir) {
                        error!(
                            "Failed to initialize cache at {}: {e:?}",
                            settings.cli.cache_dir.to_string_lossy()
                        );
                    }
                }
            }
        }
    };

    // Discover the package root for each Python file.
    let package_roots = packaging::detect_package_roots(
        &paths
            .iter()
            .flatten()
            .map(ignore::DirEntry::path)
            .collect::<Vec<_>>(),
        &resolver,
        pyproject_strategy,
    );

    let start = Instant::now();
    let mut diagnostics: Diagnostics = par_iter(&paths)
        .map(|entry| {
            match entry {
                Ok(entry) => {
                    let path = entry.path();
                    let package = path
                        .parent()
                        .and_then(|parent| package_roots.get(parent))
                        .and_then(|package| *package);
                    let settings = resolver.resolve_all(path, pyproject_strategy);
                    lint_path(path, package, settings, cache, autofix)
                        .map_err(|e| (Some(path.to_owned()), e.to_string()))
                }
                Err(e) => Err((
                    if let Error::WithPath { path, .. } = e {
                        Some(path.clone())
                    } else {
                        None
                    },
                    e.io_error()
                        .map_or_else(|| e.to_string(), io::Error::to_string),
                )),
            }
            .unwrap_or_else(|(path, message)| {
                if let Some(path) = &path {
                    let settings = resolver.resolve(path, pyproject_strategy);
                    if settings.rules.enabled(&Rule::IOError) {
                        Diagnostics::new(vec![Message {
                            kind: IOError { message }.into(),
                            location: Location::default(),
                            end_location: Location::default(),
                            fix: None,
                            filename: format!("{}", path.display()),
                            source: None,
                        }])
                    } else {
                        error!("Failed to check {}: {message}", path.to_string_lossy());
                        Diagnostics::default()
                    }
                } else {
                    error!("{message}");
                    Diagnostics::default()
                }
            })
        })
        .reduce(Diagnostics::default, |mut acc, item| {
            acc += item;
            acc
        });

    diagnostics.messages.sort_unstable();
    let duration = start.elapsed();
    debug!("Checked {:?} files in: {:?}", paths.len(), duration);

    Ok(diagnostics)
}

/// Read a `String` from `stdin`.
fn read_from_stdin() -> Result<String> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;
    Ok(buffer)
}

/// Run the linter over a single file, read from `stdin`.
pub fn run_stdin(
    filename: Option<&Path>,
    pyproject_strategy: &PyprojectDiscovery,
    overrides: &Overrides,
    autofix: fix::FixMode,
) -> Result<Diagnostics> {
    if let Some(filename) = filename {
        if !resolver::python_file_at_path(filename, pyproject_strategy, overrides)? {
            return Ok(Diagnostics::default());
        }
    }
    let settings = match pyproject_strategy {
        PyprojectDiscovery::Fixed(settings) => settings,
        PyprojectDiscovery::Hierarchical(settings) => settings,
    };
    let package_root = filename
        .and_then(Path::parent)
        .and_then(|path| packaging::detect_package_root(path, &settings.lib.namespace_packages));
    let stdin = read_from_stdin()?;
    let mut diagnostics = lint_stdin(filename, package_root, &stdin, &settings.lib, autofix)?;
    diagnostics.messages.sort_unstable();
    Ok(diagnostics)
}

/// Add `noqa` directives to a collection of files.
pub fn add_noqa(
    files: &[PathBuf],
    pyproject_strategy: &PyprojectDiscovery,
    overrides: &Overrides,
) -> Result<usize> {
    // Collect all the files to check.
    let start = Instant::now();
    let (paths, resolver) = resolver::python_files_in_path(files, pyproject_strategy, overrides)?;
    let duration = start.elapsed();
    debug!("Identified files to lint in: {:?}", duration);

    if paths.is_empty() {
        warn_user_once!("No Python files found under the given path(s)");
        return Ok(0);
    }

    let start = Instant::now();
    let modifications: usize = par_iter(&paths)
        .flatten()
        .filter_map(|entry| {
            let path = entry.path();
            let settings = resolver.resolve(path, pyproject_strategy);
            match add_noqa_to_path(path, settings) {
                Ok(count) => Some(count),
                Err(e) => {
                    error!("Failed to add noqa to {}: {e}", path.to_string_lossy());
                    None
                }
            }
        })
        .sum();

    let duration = start.elapsed();
    debug!("Added noqa to files in: {:?}", duration);

    Ok(modifications)
}

/// Print the user-facing configuration settings.
pub fn show_settings(
    files: &[PathBuf],
    pyproject_strategy: &PyprojectDiscovery,
    overrides: &Overrides,
) -> Result<()> {
    // Collect all files in the hierarchy.
    let (paths, resolver) = resolver::python_files_in_path(files, pyproject_strategy, overrides)?;

    // Print the list of files.
    let Some(entry) = paths
        .iter()
        .flatten()
        .sorted_by(|a, b| a.path().cmp(b.path())).next() else {
        bail!("No files found under the given path");
    };
    let path = entry.path();
    let settings = resolver.resolve(path, pyproject_strategy);

    let mut stdout = BufWriter::new(io::stdout().lock());
    write!(stdout, "Resolved settings for: {path:?}")?;
    write!(stdout, "{settings:#?}")?;

    Ok(())
}

/// Show the list of files to be checked based on current settings.
pub fn show_files(
    files: &[PathBuf],
    pyproject_strategy: &PyprojectDiscovery,
    overrides: &Overrides,
) -> Result<()> {
    // Collect all files in the hierarchy.
    let (paths, _resolver) = resolver::python_files_in_path(files, pyproject_strategy, overrides)?;

    if paths.is_empty() {
        warn_user_once!("No Python files found under the given path(s)");
        return Ok(());
    }

    // Print the list of files.
    let mut stdout = BufWriter::new(io::stdout().lock());
    for entry in paths
        .iter()
        .flatten()
        .sorted_by(|a, b| a.path().cmp(b.path()))
    {
        writeln!(stdout, "{}", entry.path().to_string_lossy())?;
    }

    Ok(())
}

#[derive(Serialize)]
struct Explanation<'a> {
    code: &'a str,
    linter: &'a str,
    summary: &'a str,
}

/// Explain a `Rule` to the user.
pub fn rule(rule: &Rule, format: HelpFormat) -> Result<()> {
    let (linter, _) = Linter::parse_code(rule.code()).unwrap();
    let mut stdout = BufWriter::new(io::stdout().lock());
    match format {
        HelpFormat::Text => {
            writeln!(stdout, "{}\n", rule.as_ref())?;
            writeln!(stdout, "Code: {} ({})\n", rule.code(), linter.name())?;

            if let Some(autofix) = rule.autofixable() {
                writeln!(
                    stdout,
                    "{}",
                    match autofix.available {
                        AutofixAvailability::Sometimes => "Autofix is sometimes available.\n",
                        AutofixAvailability::Always => "Autofix is always available.\n",
                    }
                )?;
            }

            writeln!(stdout, "Message formats:\n")?;

            for format in rule.message_formats() {
                writeln!(stdout, "* {format}")?;
            }
        }
        HelpFormat::Json => {
            writeln!(
                stdout,
                "{}",
                serde_json::to_string_pretty(&Explanation {
                    code: rule.code(),
                    linter: linter.name(),
                    summary: rule.message_formats()[0],
                })?
            )?;
        }
    };
    Ok(())
}

/// Clear any caches in the current directory or any subdirectories.
pub fn clean(level: LogLevel) -> Result<()> {
    let mut stderr = BufWriter::new(io::stderr().lock());
    for entry in WalkDir::new(&*path_dedot::CWD)
        .into_iter()
        .filter_map(Result::ok)
        .filter(|entry| entry.file_type().is_dir())
    {
        let cache = entry.path().join(CACHE_DIR_NAME);
        if cache.is_dir() {
            if level >= LogLevel::Default {
                writeln!(
                    stderr,
                    "Removing cache at: {}",
                    fs::relativize_path(&cache).bold()
                )?;
            }
            remove_dir_all(&cache)?;
        }
    }
    Ok(())
}
66  crates/ruff_cli/src/commands/linter.rs  Normal file
@@ -0,0 +1,66 @@
use itertools::Itertools;
use serde::Serialize;
use strum::IntoEnumIterator;

use ruff::registry::{Linter, RuleNamespace, UpstreamCategory};

use crate::args::HelpFormat;

pub fn linter(format: HelpFormat) {
    match format {
        HelpFormat::Text => {
            for linter in Linter::iter() {
                let prefix = match linter.common_prefix() {
                    "" => linter
                        .upstream_categories()
                        .unwrap()
                        .iter()
                        .map(|UpstreamCategory(prefix, ..)| prefix.as_ref())
                        .join("/"),
                    prefix => prefix.to_string(),
                };

                #[allow(clippy::print_stdout)]
                {
                    println!("{:>4} {}", prefix, linter.name());
                }
            }
        }

        HelpFormat::Json => {
            let linters: Vec<_> = Linter::iter()
                .map(|linter_info| LinterInfo {
                    prefix: linter_info.common_prefix(),
                    name: linter_info.name(),
                    categories: linter_info.upstream_categories().map(|cats| {
                        cats.iter()
                            .map(|UpstreamCategory(prefix, name, ..)| LinterCategoryInfo {
                                prefix: prefix.as_ref(),
                                name,
                            })
                            .collect()
                    }),
                })
                .collect();

            #[allow(clippy::print_stdout)]
            {
                println!("{}", serde_json::to_string_pretty(&linters).unwrap());
            }
        }
    }
}

#[derive(Serialize)]
struct LinterInfo {
    prefix: &'static str,
    name: &'static str,
    #[serde(skip_serializing_if = "Option::is_none")]
    categories: Option<Vec<LinterCategoryInfo>>,
}

#[derive(Serialize)]
struct LinterCategoryInfo {
    prefix: &'static str,
    name: &'static str,
}
225  crates/ruff_cli/src/diagnostics.rs  Normal file
@@ -0,0 +1,225 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::fs::write;
use std::io;
use std::io::Write;
use std::ops::AddAssign;
use std::path::Path;

use anyhow::Result;
use colored::Colorize;
use log::debug;
use ruff::linter::{lint_fix, lint_only, LinterResult};
use ruff::message::Message;
use ruff::settings::{flags, AllSettings, Settings};
use ruff::{fix, fs};
use similar::TextDiff;

use crate::cache;

#[derive(Debug, Default)]
pub struct Diagnostics {
    pub messages: Vec<Message>,
    pub fixed: usize,
}

impl Diagnostics {
    pub fn new(messages: Vec<Message>) -> Self {
        Self { messages, fixed: 0 }
    }
}

impl AddAssign for Diagnostics {
    fn add_assign(&mut self, other: Self) {
        self.messages.extend(other.messages);
        self.fixed += other.fixed;
    }
}

/// Lint the source code at the given `Path`.
pub fn lint_path(
    path: &Path,
    package: Option<&Path>,
    settings: &AllSettings,
    cache: flags::Cache,
    autofix: fix::FixMode,
) -> Result<Diagnostics> {
    // Check the cache.
    // TODO(charlie): `fixer::Mode::Apply` and `fixer::Mode::Diff` both have
    // side-effects that aren't captured in the cache. (In practice, it's fine
    // to cache `fixer::Mode::Apply`, since a file either has no fixes, or we'll
    // write the fixes to disk, thus invalidating the cache. But it's a bit hard
    // to reason about. We need to come up with a better solution here.)
    let metadata = if matches!(cache, flags::Cache::Enabled)
        && matches!(autofix, fix::FixMode::None | fix::FixMode::Generate)
    {
        let metadata = path.metadata()?;
        if let Some(messages) =
            cache::get(path, package.as_ref(), &metadata, settings, autofix.into())
        {
            debug!("Cache hit for: {}", path.to_string_lossy());
            return Ok(Diagnostics::new(messages));
        }
        Some(metadata)
    } else {
        None
    };

    // Read the file from disk.
    let contents = fs::read_file(path)?;

    // Lint the file.
    let (
        LinterResult {
            data: messages,
            error: parse_error,
        },
        fixed,
    ) = if matches!(autofix, fix::FixMode::Apply | fix::FixMode::Diff) {
        if let Ok((result, transformed, fixed)) = lint_fix(&contents, path, package, &settings.lib)
        {
            if fixed > 0 {
                if matches!(autofix, fix::FixMode::Apply) {
                    write(path, transformed.as_bytes())?;
                } else if matches!(autofix, fix::FixMode::Diff) {
                    let mut stdout = io::stdout().lock();
                    TextDiff::from_lines(contents.as_str(), &transformed)
                        .unified_diff()
                        .header(&fs::relativize_path(path), &fs::relativize_path(path))
                        .to_writer(&mut stdout)?;
                    stdout.write_all(b"\n")?;
                    stdout.flush()?;
                }
            }
            (result, fixed)
        } else {
            // If we fail to autofix, lint the original source code.
            let result = lint_only(&contents, path, package, &settings.lib, autofix.into());
            let fixed = 0;
            (result, fixed)
        }
    } else {
        let result = lint_only(&contents, path, package, &settings.lib, autofix.into());
        let fixed = 0;
        (result, fixed)
    };

    if let Some(err) = parse_error {
        // Notify the user of any parse errors.
        #[allow(clippy::print_stderr)]
        {
            eprintln!(
                "{}{} {}{}{} {err}",
                "error".red().bold(),
                ":".bold(),
                "Failed to parse ".bold(),
                fs::relativize_path(path).bold(),
                ":".bold()
            );
        }

        // Purge the cache.
        cache::del(path, package.as_ref(), settings, autofix.into());
    } else {
        // Re-populate the cache.
        if let Some(metadata) = metadata {
            cache::set(
                path,
                package.as_ref(),
                &metadata,
                settings,
                autofix.into(),
                &messages,
            );
        }
    }

    Ok(Diagnostics { messages, fixed })
}

/// Generate `Diagnostic`s from source code content derived from
/// stdin.
pub fn lint_stdin(
    path: Option<&Path>,
    package: Option<&Path>,
    contents: &str,
    settings: &Settings,
    autofix: fix::FixMode,
) -> Result<Diagnostics> {
    // Lint the inputs.
    let (
        LinterResult {
            data: messages,
            error: parse_error,
        },
        fixed,
    ) = if matches!(autofix, fix::FixMode::Apply | fix::FixMode::Diff) {
        if let Ok((result, transformed, fixed)) = lint_fix(
            contents,
            path.unwrap_or_else(|| Path::new("-")),
            package,
            settings,
        ) {
            if matches!(autofix, fix::FixMode::Apply) {
                // Write the contents to stdout, regardless of whether any errors were fixed.
                io::stdout().write_all(transformed.as_bytes())?;
            } else if matches!(autofix, fix::FixMode::Diff) {
                // But only write a diff if it's non-empty.
                if fixed > 0 {
                    let text_diff = TextDiff::from_lines(contents, &transformed);
                    let mut unified_diff = text_diff.unified_diff();
                    if let Some(path) = path {
                        unified_diff.header(&fs::relativize_path(path), &fs::relativize_path(path));
                    }

                    let mut stdout = io::stdout().lock();
                    unified_diff.to_writer(&mut stdout)?;
                    stdout.write_all(b"\n")?;
                    stdout.flush()?;
                }
            }

            (result, fixed)
        } else {
            // If we fail to autofix, lint the original source code.
            let result = lint_only(
                contents,
                path.unwrap_or_else(|| Path::new("-")),
                package,
                settings,
                autofix.into(),
            );
            let fixed = 0;

            // Write the contents to stdout anyway.
            if matches!(autofix, fix::FixMode::Apply) {
                io::stdout().write_all(contents.as_bytes())?;
            }

            (result, fixed)
        }
    } else {
        let result = lint_only(
            contents,
            path.unwrap_or_else(|| Path::new("-")),
            package,
            settings,
            autofix.into(),
        );
        let fixed = 0;
        (result, fixed)
    };

    if let Some(err) = parse_error {
        #[allow(clippy::print_stderr)]
        {
            eprintln!(
                "{}{} Failed to parse {}: {err}",
                "error".red().bold(),
                ":".bold(),
                path.map_or_else(|| "-".into(), fs::relativize_path).bold()
            );
        }
    }

    Ok(Diagnostics { messages, fixed })
}
16  crates/ruff_cli/src/iterators.rs  Normal file
@@ -0,0 +1,16 @@
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;

/// Shim that calls `par_iter` except for wasm because there's no wasm support
/// in rayon yet (there is a shim to be used for the web, but it requires js
/// cooperation) Unfortunately, `ParallelIterator` does not implement `Iterator`
/// so the signatures diverge
#[cfg(not(target_family = "wasm"))]
pub fn par_iter<T: Sync>(iterable: &[T]) -> impl ParallelIterator<Item = &T> {
    iterable.par_iter()
}

#[cfg(target_family = "wasm")]
pub fn par_iter<T: Sync>(iterable: &[T]) -> impl Iterator<Item = &T> {
    iterable.iter()
}
27  crates/ruff_cli/src/lib.rs  Normal file
@@ -0,0 +1,27 @@
//! This library only exists to enable the Ruff internal tooling (`ruff_dev`)
//! to automatically update the `ruff help` output in the `README.md`.
//!
//! For the actual Ruff library, see [`ruff`].

mod args;

use clap::CommandFactory;

/// Returns the output of `ruff help`.
pub fn command_help() -> String {
    args::Args::command().render_help().to_string()
}

/// Returns the output of `ruff help check`.
pub fn subcommand_help() -> String {
    let mut cmd = args::Args::command();

    // The build call is necessary for the help output to contain `Usage: ruff check` instead of `Usage: check`
    // see https://github.com/clap-rs/clap/issues/4685
    cmd.build();

    cmd.find_subcommand_mut("check")
        .expect("`check` subcommand not found")
        .render_help()
        .to_string()
}
298  crates/ruff_cli/src/main.rs  Normal file
@@ -0,0 +1,298 @@
use std::io::{self};
use std::path::PathBuf;
use std::process::ExitCode;
use std::sync::mpsc::channel;

use anyhow::Result;
use clap::{CommandFactory, Parser, Subcommand};
use colored::Colorize;
use notify::{recommended_watcher, RecursiveMode, Watcher};

use ::ruff::logging::{set_up_logging, LogLevel};
use ::ruff::resolver::PyprojectDiscovery;
use ::ruff::settings::types::SerializationFormat;
use ::ruff::settings::CliSettings;
use ::ruff::{fix, fs, warn_user_once};
use args::{Args, CheckArgs, Command};
use printer::{Printer, Violations};

pub(crate) mod args;
mod cache;
mod commands;
mod diagnostics;
mod iterators;
mod printer;
mod resolve;

fn inner_main() -> Result<ExitCode> {
    let mut args: Vec<_> = std::env::args_os().collect();

    // Clap doesn't support default subcommands but we want to run `check` by
    // default for convenience and backwards-compatibility, so we just
    // preprocess the arguments accordingly before passing them to Clap.
    if let Some(arg) = args.get(1).and_then(|s| s.to_str()) {
        if !Command::has_subcommand(rewrite_legacy_subcommand(arg))
            && arg != "-h"
            && arg != "--help"
            && arg != "-V"
            && arg != "--version"
            && arg != "help"
        {
            args.insert(1, "check".into());
        }
    }

    // Extract command-line arguments.
    let Args {
        command,
        log_level_args,
    } = Args::parse_from(args);

    #[cfg(not(debug_assertions))]
    {
        let default_panic_hook = std::panic::take_hook();
        std::panic::set_hook(Box::new(move |info| {
            #[allow(clippy::print_stderr)]
            {
                eprintln!(
                    r#"
{}: `ruff` crashed. This indicates a bug in `ruff`. If you could open an issue at:

https://github.com/charliermarsh/ruff/issues/new?title=%5BPanic%5D

quoting the executed command, along with the relevant file contents and `pyproject.toml` settings, we'd be very appreciative!
"#,
                    "error".red().bold(),
                );
            }
            default_panic_hook(info);
        }));
    }

    let log_level: LogLevel = (&log_level_args).into();
    set_up_logging(&log_level)?;

    match command {
        Command::Rule { rule, format } => commands::rule(&rule, format)?,
        Command::Linter { format } => commands::linter::linter(format),
        Command::Clean => commands::clean(log_level)?,
        Command::GenerateShellCompletion { shell } => {
            shell.generate(&mut Args::command(), &mut io::stdout());
        }

        Command::Check(args) => return check(args, log_level),
    }

    Ok(ExitCode::SUCCESS)
}

fn check(args: CheckArgs, log_level: LogLevel) -> Result<ExitCode> {
    let (cli, overrides) = args.partition();

    // Construct the "default" settings. These are used when no `pyproject.toml`
    // files are present, or files are injected from outside of the hierarchy.
    let pyproject_strategy = resolve::resolve(
        cli.isolated,
        cli.config.as_deref(),
        &overrides,
        cli.stdin_filename.as_deref(),
    )?;

    if cli.show_settings {
        commands::show_settings(&cli.files, &pyproject_strategy, &overrides)?;
        return Ok(ExitCode::SUCCESS);
    }
    if cli.show_files {
        commands::show_files(&cli.files, &pyproject_strategy, &overrides)?;
        return Ok(ExitCode::SUCCESS);
    }

    // Extract options that are included in `Settings`, but only apply at the top
    // level.
    let CliSettings {
        fix,
        fix_only,
        format,
        update_check,
        ..
    } = match &pyproject_strategy {
        PyprojectDiscovery::Fixed(settings) => settings.cli.clone(),
        PyprojectDiscovery::Hierarchical(settings) => settings.cli.clone(),
    };

    // Autofix rules are as follows:
    // - If `--fix` or `--fix-only` is set, always apply fixes to the filesystem (or
    //   print them to stdout, if we're reading from stdin).
    // - Otherwise, if `--format json` is set, generate the fixes (so we print them
    //   out as part of the JSON payload), but don't write them to disk.
    // - If `--diff` or `--fix-only` are set, don't print any violations (only
    //   fixes).
    // TODO(charlie): Consider adding ESLint's `--fix-dry-run`, which would generate
    // but not apply fixes. That would allow us to avoid special-casing JSON
    // here.
    let autofix = if cli.diff {
        fix::FixMode::Diff
    } else if fix || fix_only {
        fix::FixMode::Apply
    } else if matches!(format, SerializationFormat::Json) {
        fix::FixMode::Generate
    } else {
        fix::FixMode::None
    };
    let violations = if cli.diff || fix_only {
        Violations::Hide
    } else {
        Violations::Show
    };
    let cache = !cli.no_cache;

    #[cfg(debug_assertions)]
    if cache {
        // `--no-cache` doesn't respect code changes, and so is often confusing during
        // development.
        warn_user_once!("Detected debug build without --no-cache.");
    }

    if cli.add_noqa {
        if !matches!(autofix, fix::FixMode::None) {
            warn_user_once!("--fix is incompatible with --add-noqa.");
        }
        let modifications = commands::add_noqa(&cli.files, &pyproject_strategy, &overrides)?;
        if modifications > 0 && log_level >= LogLevel::Default {
            #[allow(clippy::print_stderr)]
            {
                eprintln!("Added {modifications} noqa directives.");
            }
        }
        return Ok(ExitCode::SUCCESS);
    }

    let printer = Printer::new(&format, &log_level, &autofix, &violations);

    if cli.watch {
        if !matches!(autofix, fix::FixMode::None) {
            warn_user_once!("--fix is unsupported in watch mode.");
        }
        if format != SerializationFormat::Text {
            warn_user_once!("--format 'text' is used in watch mode.");
        }

        // Perform an initial run instantly.
        Printer::clear_screen()?;
        printer.write_to_user("Starting linter in watch mode...\n");

        let messages = commands::run(
            &cli.files,
            &pyproject_strategy,
            &overrides,
            cache.into(),
            fix::FixMode::None,
        )?;
        printer.write_continuously(&messages)?;

        // Configure the file watcher.
        let (tx, rx) = channel();
        let mut watcher = recommended_watcher(tx)?;
        for file in &cli.files {
            watcher.watch(file, RecursiveMode::Recursive)?;
        }

        loop {
            match rx.recv() {
                Ok(event) => {
                    let paths = event?.paths;
                    let py_changed = paths.iter().any(|path| {
                        path.extension()
                            .map(|ext| ext == "py" || ext == "pyi")
                            .unwrap_or_default()
                    });
                    if py_changed {
                        Printer::clear_screen()?;
                        printer.write_to_user("File change detected...\n");

                        let messages = commands::run(
                            &cli.files,
                            &pyproject_strategy,
                            &overrides,
                            cache.into(),
                            fix::FixMode::None,
                        )?;
                        printer.write_continuously(&messages)?;
                    }
                }
                Err(err) => return Err(err.into()),
            }
        }
    } else {
        let is_stdin = cli.files == vec![PathBuf::from("-")];

        // Generate lint violations.
        let diagnostics = if is_stdin {
            commands::run_stdin(
                cli.stdin_filename.map(fs::normalize_path).as_deref(),
                &pyproject_strategy,
                &overrides,
                autofix,
            )?
        } else {
            commands::run(
                &cli.files,
                &pyproject_strategy,
                &overrides,
                cache.into(),
                autofix,
            )?
        };

        // Always try to print violations (the printer itself may suppress output),
        // unless we're writing fixes via stdin (in which case, the transformed
        // source code goes to stdout).
        if !(is_stdin && matches!(autofix, fix::FixMode::Apply | fix::FixMode::Diff)) {
            if cli.statistics {
                printer.write_statistics(&diagnostics)?;
            } else {
                printer.write_once(&diagnostics)?;
            }
        }

        if update_check {
            warn_user_once!(
                "update-check has been removed; setting it will cause an error in a future version."
            );
        }

        if !cli.exit_zero {
            if cli.diff || fix_only {
                if diagnostics.fixed > 0 {
                    return Ok(ExitCode::FAILURE);
                }
            } else if !diagnostics.messages.is_empty() {
                return Ok(ExitCode::FAILURE);
            }
        }
    }
    Ok(ExitCode::SUCCESS)
}

fn rewrite_legacy_subcommand(cmd: &str) -> &str {
    match cmd {
        "--explain" => "rule",
        "--clean" => "clean",
        "--generate-shell-completion" => "generate-shell-completion",
        cmd => cmd,
    }
}

#[must_use]
pub fn main() -> ExitCode {
    match inner_main() {
        Ok(code) => code,
        Err(err) => {
            #[allow(clippy::print_stderr)]
            {
                eprintln!("{}{} {err:?}", "error".red().bold(), ":".bold());
            }
            ExitCode::FAILURE
        }
    }
}
625
crates/ruff_cli/src/printer.rs
Normal file
625
crates/ruff_cli/src/printer.rs
Normal file
|
@ -0,0 +1,625 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::io;
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use annotate_snippets::display_list::{DisplayList, FormatOptions};
|
||||
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
|
||||
use anyhow::Result;
|
||||
use colored::Colorize;
|
||||
use itertools::{iterate, Itertools};
|
||||
use ruff::fs::relativize_path;
|
||||
use ruff::logging::LogLevel;
|
||||
use ruff::message::{Location, Message};
|
||||
use ruff::registry::Rule;
|
||||
use ruff::settings::types::SerializationFormat;
|
||||
use ruff::{fix, notify_user};
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::diagnostics::Diagnostics;
|
||||
|
||||
/// Enum to control whether lint violations are shown to the user.
|
||||
pub enum Violations {
|
||||
Show,
|
||||
Hide,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ExpandedFix<'a> {
|
||||
content: &'a str,
|
||||
message: Option<String>,
|
||||
location: &'a Location,
|
||||
end_location: &'a Location,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ExpandedMessage<'a> {
|
||||
code: SerializeRuleAsCode<'a>,
|
||||
message: String,
|
||||
fix: Option<ExpandedFix<'a>>,
|
||||
location: Location,
|
||||
end_location: Location,
|
||||
filename: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ExpandedStatistics<'a> {
|
||||
count: usize,
|
||||
code: &'a str,
|
||||
message: String,
|
||||
}
|
||||
|
||||
struct SerializeRuleAsCode<'a>(&'a Rule);
|
||||
|
||||
impl Serialize for SerializeRuleAsCode<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(self.0.code())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Rule> for SerializeRuleAsCode<'a> {
|
||||
fn from(rule: &'a Rule) -> Self {
|
||||
Self(rule)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Printer<'a> {
|
||||
format: &'a SerializationFormat,
|
||||
log_level: &'a LogLevel,
|
||||
autofix: &'a fix::FixMode,
|
||||
violations: &'a Violations,
|
||||
}
|
||||
|
||||
impl<'a> Printer<'a> {
    pub const fn new(
        format: &'a SerializationFormat,
        log_level: &'a LogLevel,
        autofix: &'a fix::FixMode,
        violations: &'a Violations,
    ) -> Self {
        Self {
            format,
            log_level,
            autofix,
            violations,
        }
    }

    pub fn write_to_user(&self, message: &str) {
        if self.log_level >= &LogLevel::Default {
            notify_user!("{}", message);
        }
    }

    fn post_text<T: Write>(&self, stdout: &mut T, diagnostics: &Diagnostics) -> Result<()> {
        if self.log_level >= &LogLevel::Default {
            match self.violations {
                Violations::Show => {
                    let fixed = diagnostics.fixed;
                    let remaining = diagnostics.messages.len();
                    let total = fixed + remaining;
                    if fixed > 0 {
                        let s = if total == 1 { "" } else { "s" };
                        writeln!(
                            stdout,
                            "Found {total} error{s} ({fixed} fixed, {remaining} remaining)."
                        )?;
                    } else if remaining > 0 {
                        let s = if remaining == 1 { "" } else { "s" };
                        writeln!(stdout, "Found {remaining} error{s}.")?;
                    }

                    if !matches!(self.autofix, fix::FixMode::Apply) {
                        let num_fixable = diagnostics
                            .messages
                            .iter()
                            .filter(|message| message.kind.fixable())
                            .count();
                        if num_fixable > 0 {
                            writeln!(
                                stdout,
                                "[*] {num_fixable} potentially fixable with the --fix option."
                            )?;
                        }
                    }
                }
                Violations::Hide => {
                    let fixed = diagnostics.fixed;
                    if fixed > 0 {
                        let s = if fixed == 1 { "" } else { "s" };
                        if matches!(self.autofix, fix::FixMode::Apply) {
                            writeln!(stdout, "Fixed {fixed} error{s}.")?;
                        } else if matches!(self.autofix, fix::FixMode::Diff) {
                            writeln!(stdout, "Would fix {fixed} error{s}.")?;
                        }
                    }
                }
            }
        }
        Ok(())
    }

    pub fn write_once(&self, diagnostics: &Diagnostics) -> Result<()> {
        if matches!(self.log_level, LogLevel::Silent) {
            return Ok(());
        }

        if matches!(self.violations, Violations::Hide) {
            let mut stdout = BufWriter::new(io::stdout().lock());
            if matches!(
                self.format,
                SerializationFormat::Text | SerializationFormat::Grouped
            ) {
                self.post_text(&mut stdout, diagnostics)?;
            }
            return Ok(());
        }

        let mut stdout = BufWriter::new(io::stdout().lock());
        match self.format {
            SerializationFormat::Json => {
                writeln!(
                    stdout,
                    "{}",
                    serde_json::to_string_pretty(
                        &diagnostics
                            .messages
                            .iter()
                            .map(|message| ExpandedMessage {
                                code: message.kind.rule().into(),
                                message: message.kind.body(),
                                fix: message.fix.as_ref().map(|fix| ExpandedFix {
                                    content: &fix.content,
                                    location: &fix.location,
                                    end_location: &fix.end_location,
                                    message: message.kind.commit(),
                                }),
                                location: message.location,
                                end_location: message.end_location,
                                filename: &message.filename,
                            })
                            .collect::<Vec<_>>()
                    )?
                )?;
            }
            SerializationFormat::Junit => {
                use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};

                let mut report = Report::new("ruff");
                for (filename, messages) in group_messages_by_filename(&diagnostics.messages) {
                    let mut test_suite = TestSuite::new(filename);
                    test_suite
                        .extra
                        .insert("package".to_string(), "org.ruff".to_string());
                    for message in messages {
                        let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                        status.set_message(message.kind.body());
                        status.set_description(format!(
                            "line {}, col {}, {}",
                            message.location.row(),
                            message.location.column(),
                            message.kind.body()
                        ));
                        let mut case = TestCase::new(
                            format!("org.ruff.{}", message.kind.rule().code()),
                            status,
                        );
                        let file_path = Path::new(filename);
                        let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
                        let classname = file_path.parent().unwrap().join(file_stem);
                        case.set_classname(classname.to_str().unwrap());
                        case.extra
                            .insert("line".to_string(), message.location.row().to_string());
                        case.extra
                            .insert("column".to_string(), message.location.column().to_string());

                        test_suite.add_test_case(case);
                    }
                    report.add_test_suite(test_suite);
                }
                writeln!(stdout, "{}", report.to_string().unwrap())?;
            }
            SerializationFormat::Text => {
                for message in &diagnostics.messages {
                    print_message(&mut stdout, message)?;
                }

                self.post_text(&mut stdout, diagnostics)?;
            }
            SerializationFormat::Grouped => {
                for (filename, messages) in group_messages_by_filename(&diagnostics.messages) {
                    // Compute the maximum number of digits in the row and column, for messages in
                    // this file.
                    let row_length = num_digits(
                        messages
                            .iter()
                            .map(|message| message.location.row())
                            .max()
                            .unwrap(),
                    );
                    let column_length = num_digits(
                        messages
                            .iter()
                            .map(|message| message.location.column())
                            .max()
                            .unwrap(),
                    );

                    // Print the filename.
                    writeln!(
                        stdout,
                        "{}:",
                        relativize_path(Path::new(&filename)).underline()
                    )?;

                    // Print each message.
                    for message in messages {
                        print_grouped_message(&mut stdout, message, row_length, column_length)?;
                    }
                    writeln!(stdout)?;
                }

                self.post_text(&mut stdout, diagnostics)?;
            }
            SerializationFormat::Github => {
                // Generate error workflow command in GitHub Actions format.
                // See: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message
                for message in &diagnostics.messages {
                    let label = format!(
                        "{}{}{}{}{}{} {} {}",
                        relativize_path(Path::new(&message.filename)),
                        ":",
                        message.location.row(),
                        ":",
                        message.location.column(),
                        ":",
                        message.kind.rule().code(),
                        message.kind.body(),
                    );
                    writeln!(
                        stdout,
                        "::error title=Ruff \
                         ({}),file={},line={},col={},endLine={},endColumn={}::{}",
                        message.kind.rule().code(),
                        message.filename,
                        message.location.row(),
                        message.location.column(),
                        message.end_location.row(),
                        message.end_location.column(),
                        label,
                    )?;
                }
            }
            SerializationFormat::Gitlab => {
                // Generate JSON with violations in GitLab CI format
                // https://docs.gitlab.com/ee/ci/testing/code_quality.html#implementing-a-custom-tool
                writeln!(stdout,
                    "{}",
                    serde_json::to_string_pretty(
                        &diagnostics
                            .messages
                            .iter()
                            .map(|message| {
                                json!({
                                    "description": format!("({}) {}", message.kind.rule().code(), message.kind.body()),
                                    "severity": "major",
                                    "fingerprint": message.kind.rule().code(),
                                    "location": {
                                        "path": message.filename,
                                        "lines": {
                                            "begin": message.location.row(),
                                            "end": message.end_location.row()
                                        }
                                    }
                                })
                            })
                            .collect::<Vec<_>>()
                    )?
                )?;
            }
            SerializationFormat::Pylint => {
                // Generate violations in Pylint format.
                // See: https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter
                for message in &diagnostics.messages {
                    let label = format!(
                        "{}:{}: [{}] {}",
                        relativize_path(Path::new(&message.filename)),
                        message.location.row(),
                        message.kind.rule().code(),
                        message.kind.body(),
                    );
                    writeln!(stdout, "{label}")?;
                }
            }
        }

        stdout.flush()?;

        Ok(())
    }

    pub fn write_statistics(&self, diagnostics: &Diagnostics) -> Result<()> {
        let violations = diagnostics
            .messages
            .iter()
            .map(|message| message.kind.rule())
            .sorted()
            .dedup()
            .collect::<Vec<_>>();
        if violations.is_empty() {
            return Ok(());
        }

        let statistics = violations
            .iter()
            .map(|rule| ExpandedStatistics {
                code: rule.code(),
                count: diagnostics
                    .messages
                    .iter()
                    .filter(|message| message.kind.rule() == *rule)
                    .count(),
                message: diagnostics
                    .messages
                    .iter()
                    .find(|message| message.kind.rule() == *rule)
                    .map(|message| message.kind.body())
                    .unwrap(),
            })
            .collect::<Vec<_>>();

        let mut stdout = BufWriter::new(io::stdout().lock());
        match self.format {
            SerializationFormat::Text => {
                // Compute the maximum number of digits in the count and code, for all messages,
                // to enable pretty-printing.
                let count_width = num_digits(
                    statistics
                        .iter()
                        .map(|statistic| statistic.count)
                        .max()
                        .unwrap(),
                );
                let code_width = statistics
                    .iter()
                    .map(|statistic| statistic.code.len())
                    .max()
                    .unwrap();

                // By default, we mimic Flake8's `--statistics` format.
                for msg in statistics {
                    writeln!(
                        stdout,
                        "{:>count_width$}\t{:<code_width$}\t{}",
                        msg.count, msg.code, msg.message
                    )?;
                }
                return Ok(());
            }
            SerializationFormat::Json => {
                writeln!(stdout, "{}", serde_json::to_string_pretty(&statistics)?)?;
            }
            _ => {
                anyhow::bail!(
                    "Unsupported serialization format for statistics: {:?}",
                    self.format
                )
            }
        }

        stdout.flush()?;

        Ok(())
    }

    pub fn write_continuously(&self, diagnostics: &Diagnostics) -> Result<()> {
        if matches!(self.log_level, LogLevel::Silent) {
            return Ok(());
        }

        if self.log_level >= &LogLevel::Default {
            let s = if diagnostics.messages.len() == 1 {
                ""
            } else {
                "s"
            };
            notify_user!(
                "Found {} error{s}. Watching for file changes.",
                diagnostics.messages.len()
            );
        }

        let mut stdout = BufWriter::new(io::stdout().lock());
        if !diagnostics.messages.is_empty() {
            if self.log_level >= &LogLevel::Default {
                writeln!(stdout)?;
            }
            for message in &diagnostics.messages {
                print_message(&mut stdout, message)?;
            }
        }
        stdout.flush()?;

        Ok(())
    }

    pub fn clear_screen() -> Result<()> {
        #[cfg(not(target_family = "wasm"))]
        clearscreen::clear()?;
        Ok(())
    }
}
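
For orientation on the `Github` arm above, here is a standalone sketch (hard-coded sample values, not part of the commit) that prints one workflow command of the shape Ruff emits for GitHub Actions annotations:

// Prints a single `::error` workflow command; the rule code, file, and positions are made up.
fn main() {
    let (code, file, row, col, end_row, end_col) = ("F401", "src/app.py", 1, 8, 1, 11);
    let body = "`os` imported but unused";
    let label = format!("{file}:{row}:{col}: {code} {body}");
    println!(
        "::error title=Ruff ({code}),file={file},line={row},col={col},endLine={end_row},endColumn={end_col}::{label}"
    );
}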

fn group_messages_by_filename(messages: &[Message]) -> BTreeMap<&String, Vec<&Message>> {
    let mut grouped_messages = BTreeMap::default();
    for message in messages {
        grouped_messages
            .entry(&message.filename)
            .or_insert_with(Vec::new)
            .push(message);
    }
    grouped_messages
}
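
A minimal sketch of the grouping above, using a simplified stand-in for `Message` (the real type lives in the `ruff` crate): keying a `BTreeMap` by filename returns the groups in sorted order, which keeps the grouped and JUnit outputs deterministic.

use std::collections::BTreeMap;

struct Msg {
    filename: String,
    body: &'static str,
}

fn group_by_filename(messages: &[Msg]) -> BTreeMap<&String, Vec<&Msg>> {
    let mut grouped: BTreeMap<&String, Vec<&Msg>> = BTreeMap::new();
    for message in messages {
        grouped.entry(&message.filename).or_default().push(message);
    }
    grouped
}

fn main() {
    let messages = vec![
        Msg { filename: "b.py".to_string(), body: "E501 line too long" },
        Msg { filename: "a.py".to_string(), body: "F401 unused import" },
        Msg { filename: "b.py".to_string(), body: "F841 unused variable" },
    ];
    // Prints `a.py` before `b.py`, regardless of input order.
    for (filename, group) in group_by_filename(&messages) {
        let bodies: Vec<&str> = group.iter().map(|message| message.body).collect();
        println!("{filename}: {bodies:?}");
    }
}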

fn num_digits(n: usize) -> usize {
    iterate(n, |&n| n / 10)
        .take_while(|&n| n > 0)
        .count()
        .max(1)
}
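
A quick standalone check (not in the commit) of the contract `num_digits` provides for column alignment: it counts decimal digits and never returns zero, so a row or column value of 0 still reserves one character. The `successors`-based helper below is a std-only re-expression of the `itertools::iterate` version above, not the committed code.

fn num_digits(n: usize) -> usize {
    // Repeatedly divide by 10; the iterator always yields at least the initial value.
    std::iter::successors(Some(n), |&n| (n >= 10).then_some(n / 10)).count()
}

fn main() {
    assert_eq!(num_digits(0), 1);
    assert_eq!(num_digits(7), 1);
    assert_eq!(num_digits(42), 2);
    assert_eq!(num_digits(1_000), 4);
    println!("num_digits behaves as expected");
}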

/// Print a single `Message` with full details.
fn print_message<T: Write>(stdout: &mut T, message: &Message) -> Result<()> {
    let label = if message.kind.fixable() {
        format!(
            "{}{}{}{}{}{} {} [*] {}",
            relativize_path(Path::new(&message.filename)).bold(),
            ":".cyan(),
            message.location.row(),
            ":".cyan(),
            message.location.column(),
            ":".cyan(),
            message.kind.rule().code().red().bold(),
            message.kind.body(),
        )
    } else {
        format!(
            "{}{}{}{}{}{} {} {}",
            relativize_path(Path::new(&message.filename)).bold(),
            ":".cyan(),
            message.location.row(),
            ":".cyan(),
            message.location.column(),
            ":".cyan(),
            message.kind.rule().code().red().bold(),
            message.kind.body(),
        )
    };
    writeln!(stdout, "{label}")?;
    if let Some(source) = &message.source {
        let commit = message.kind.commit();
        let footer = if commit.is_some() {
            vec![Annotation {
                id: None,
                label: commit.as_deref(),
                annotation_type: AnnotationType::Help,
            }]
        } else {
            vec![]
        };
        let snippet = Snippet {
            title: Some(Annotation {
                label: None,
                annotation_type: AnnotationType::Error,
                // The ID (error number) is already encoded in the `label`.
                id: None,
            }),
            footer,
            slices: vec![Slice {
                source: &source.contents,
                line_start: message.location.row(),
                annotations: vec![SourceAnnotation {
                    label: message.kind.rule().code(),
                    annotation_type: AnnotationType::Error,
                    range: source.range,
                }],
                // The origin (file name, line number, and column number) is already encoded
                // in the `label`.
                origin: None,
                fold: false,
            }],
            opt: FormatOptions {
                color: true,
                ..FormatOptions::default()
            },
        };
        // Skip the first line, since we format the `label` ourselves.
        let message = DisplayList::from(snippet).to_string();
        let (_, message) = message.split_once('\n').unwrap();
        writeln!(stdout, "{message}\n")?;
    }
    Ok(())
}
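
For context on the snippet rendering used in `print_message`, a self-contained sketch built on the same `annotate-snippets` types; the crate version (assumed 0.9), the source string, and the byte offsets are assumptions rather than anything taken from this commit.

// Assumes `annotate-snippets = "0.9"` in Cargo.toml.
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};

fn main() {
    let source = "import os\n";
    let snippet = Snippet {
        title: Some(Annotation {
            label: Some("`os` imported but unused"),
            id: Some("F401"),
            annotation_type: AnnotationType::Error,
        }),
        footer: vec![],
        slices: vec![Slice {
            source,
            line_start: 1,
            origin: Some("example.py"),
            annotations: vec![SourceAnnotation {
                label: "F401",
                annotation_type: AnnotationType::Error,
                // Byte range of the annotated span within `source` (assumed).
                range: (7, 9),
            }],
            fold: false,
        }],
        opt: FormatOptions {
            color: false,
            ..FormatOptions::default()
        },
    };
    println!("{}", DisplayList::from(snippet));
}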

/// Print a grouped `Message`, assumed to be printed in a group with others from
/// the same file.
fn print_grouped_message<T: Write>(
    stdout: &mut T,
    message: &Message,
    row_length: usize,
    column_length: usize,
) -> Result<()> {
    let label = if message.kind.fixable() {
        format!(
            " {}{}{}{}{} {} [*] {}",
            " ".repeat(row_length - num_digits(message.location.row())),
            message.location.row(),
            ":".cyan(),
            message.location.column(),
            " ".repeat(column_length - num_digits(message.location.column())),
            message.kind.rule().code().red().bold(),
            message.kind.body(),
        )
    } else {
        format!(
            " {}{}{}{}{} {} {}",
            " ".repeat(row_length - num_digits(message.location.row())),
            message.location.row(),
            ":".cyan(),
            message.location.column(),
            " ".repeat(column_length - num_digits(message.location.column())),
            message.kind.rule().code().red().bold(),
            message.kind.body(),
        )
    };
    writeln!(stdout, "{label}")?;
    if let Some(source) = &message.source {
        let commit = message.kind.commit();
        let footer = if commit.is_some() {
            vec![Annotation {
                id: None,
                label: commit.as_deref(),
                annotation_type: AnnotationType::Help,
            }]
        } else {
            vec![]
        };
        let snippet = Snippet {
            title: Some(Annotation {
                label: None,
                annotation_type: AnnotationType::Error,
                // The ID (error number) is already encoded in the `label`.
                id: None,
            }),
            footer,
            slices: vec![Slice {
                source: &source.contents,
                line_start: message.location.row(),
                annotations: vec![SourceAnnotation {
                    label: message.kind.rule().code(),
                    annotation_type: AnnotationType::Error,
                    range: source.range,
                }],
                // The origin (file name, line number, and column number) is already encoded
                // in the `label`.
                origin: None,
                fold: false,
            }],
            opt: FormatOptions {
                color: true,
                ..FormatOptions::default()
            },
        };
        // Skip the first line, since we format the `label` ourselves.
        let message = DisplayList::from(snippet).to_string();
        let (_, message) = message.split_once('\n').unwrap();
        let message = textwrap::indent(message, " ");
        writeln!(stdout, "{message}")?;
    }
    Ok(())
}

68
crates/ruff_cli/src/resolve.rs
Normal file

@@ -0,0 +1,68 @@
use std::path::Path;

use anyhow::Result;
use path_absolutize::path_dedot;
use ruff::resolver::{
    resolve_settings_with_processor, ConfigProcessor, PyprojectDiscovery, Relativity,
};
use ruff::settings::configuration::Configuration;
use ruff::settings::{pyproject, AllSettings};

use crate::args::Overrides;

/// Resolve the relevant settings strategy and defaults for the current
/// invocation.
pub fn resolve(
    isolated: bool,
    config: Option<&Path>,
    overrides: &Overrides,
    stdin_filename: Option<&Path>,
) -> Result<PyprojectDiscovery> {
    // First priority: if we're running in isolated mode, use the default settings.
    if isolated {
        let mut config = Configuration::default();
        overrides.process_config(&mut config);
        let settings = AllSettings::from_configuration(config, &path_dedot::CWD)?;
        return Ok(PyprojectDiscovery::Fixed(settings));
    }

    // Second priority: the user specified a `pyproject.toml` file. Use that
    // `pyproject.toml` for _all_ configuration, and resolve paths relative to the
    // current working directory. (This matches ESLint's behavior.)
    if let Some(pyproject) = config {
        let settings = resolve_settings_with_processor(pyproject, &Relativity::Cwd, overrides)?;
        return Ok(PyprojectDiscovery::Fixed(settings));
    }

    // Third priority: find a `pyproject.toml` file in either an ancestor of
    // `stdin_filename` (if set) or the current working directory, and resolve all
    // paths relative to that directory. (With `Strategy::Hierarchical`, we'll end
    // up finding the "closest" `pyproject.toml` file for every Python file later
    // on, so these act as the "default" settings.)
    if let Some(pyproject) = pyproject::find_settings_toml(
        stdin_filename
            .as_ref()
            .unwrap_or(&path_dedot::CWD.as_path()),
    )? {
        let settings = resolve_settings_with_processor(&pyproject, &Relativity::Parent, overrides)?;
        return Ok(PyprojectDiscovery::Hierarchical(settings));
    }

    // Fourth priority: find a user-specific `pyproject.toml`, but resolve all paths
    // relative to the current working directory. (With `Strategy::Hierarchical`,
    // we'll end up using the "closest" `pyproject.toml` file for every Python file
    // later on, so these act as the "default" settings.)
    if let Some(pyproject) = pyproject::find_user_settings_toml() {
        let settings = resolve_settings_with_processor(&pyproject, &Relativity::Cwd, overrides)?;
        return Ok(PyprojectDiscovery::Hierarchical(settings));
    }

    // Fallback: load Ruff's default settings, and resolve all paths relative to the
    // current working directory. (With `Strategy::Hierarchical`, we'll end up using
    // the "closest" `pyproject.toml` file for every Python file later on, so these
    // act as the "default" settings.)
    let mut config = Configuration::default();
    overrides.process_config(&mut config);
    let settings = AllSettings::from_configuration(config, &path_dedot::CWD)?;
    Ok(PyprojectDiscovery::Hierarchical(settings))
}
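
As a simplified, standalone illustration of the hierarchical discovery that `pyproject::find_settings_toml` performs (this is not the crate's implementation), the ancestor walk below returns the closest `pyproject.toml` above a starting path:

use std::path::{Path, PathBuf};

// Walk from `start` up through its parents and return the first `pyproject.toml` found.
fn find_pyproject_upwards(start: &Path) -> Option<PathBuf> {
    start
        .ancestors()
        .map(|dir| dir.join("pyproject.toml"))
        .find(|candidate| candidate.is_file())
}

fn main() {
    let cwd = std::env::current_dir().expect("current directory should be readable");
    match find_pyproject_upwards(&cwd) {
        Some(path) => println!("closest pyproject.toml: {}", path.display()),
        None => println!("no pyproject.toml found above {}", cwd.display()),
    }
}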