Support hierarchical settings for nested directories (#1190)
parent 19e9eb1af8
commit 0adc9ed259
18 changed files with 323 additions and 161 deletions
src/commands.rs

@@ -1,19 +1,21 @@
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};

use anyhow::{bail, Result};
use serde::Serialize;
use walkdir::DirEntry;

use crate::checks::CheckCode;
+use crate::cli::Overrides;
use crate::fs::iter_python_files;
+use crate::resolver::{discover_settings, Resolver};
use crate::settings::types::SerializationFormat;
use crate::{Configuration, Settings};

/// Print the user-facing configuration settings.
pub fn show_settings(
    configuration: &Configuration,
-    project_root: Option<&PathBuf>,
-    pyproject: Option<&PathBuf>,
+    project_root: Option<&Path>,
+    pyproject: Option<&Path>,
) {
    println!("Resolved configuration: {configuration:#?}");
    println!("Found project root at: {project_root:?}");

@@ -21,13 +23,23 @@ pub fn show_settings(
}

/// Show the list of files to be checked based on current settings.
-pub fn show_files(files: &[PathBuf], settings: &Settings) {
+pub fn show_files(files: &[PathBuf], default: &Settings, overrides: &Overrides) {
+    // Discover the settings for the filesystem hierarchy.
+    let settings = discover_settings(files, overrides);
+    let resolver = Resolver {
+        default,
+        settings: &settings,
+    };
+
    // Collect all files in the hierarchy.
    let mut entries: Vec<DirEntry> = files
        .iter()
-        .flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
+        .flat_map(|path| iter_python_files(path, &resolver))
        .flatten()
        .collect();
    entries.sort_by(|a, b| a.path().cmp(b.path()));

    // Print the list of files.
    for entry in entries {
        println!("{}", entry.path().to_string_lossy());
    }
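The rewritten show_files is the pattern this commit applies throughout: discover one Settings value per pyproject.toml root up front, then resolve each candidate path against that list, falling back to a default. A minimal, self-contained sketch of just that lookup, with a placeholder string where the real code holds a Settings value:

use std::path::{Path, PathBuf};

/// Simplified stand-in for ruff's Resolver; only the lookup logic is shown.
struct Resolver<'a> {
    settings: &'a [(PathBuf, &'a str)],
    default: &'a str,
}

impl<'a> Resolver<'a> {
    fn resolve(&self, path: &Path) -> &'a str {
        self.settings
            .iter()
            .find(|(root, _)| path.starts_with(root))
            .map_or(self.default, |(_, settings)| *settings)
    }
}

fn main() {
    // One entry per discovered pyproject.toml, deepest roots first.
    let settings = vec![
        (PathBuf::from("repo/examples"), "examples settings"),
        (PathBuf::from("repo"), "repo settings"),
    ];
    let resolver = Resolver {
        settings: &settings,
        default: "default settings",
    };
    assert_eq!(resolver.resolve(Path::new("repo/examples/a.py")), "examples settings");
    assert_eq!(resolver.resolve(Path::new("repo/src/b.py")), "repo settings");
    assert_eq!(resolver.resolve(Path::new("elsewhere/c.py")), "default settings");
}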
src/fs.rs (76 lines changed)

@@ -11,6 +11,7 @@ use rustc_hash::FxHashSet;
use walkdir::{DirEntry, WalkDir};

use crate::checks::CheckCode;
+use crate::resolver::Resolver;

/// Extract the absolute path and basename (as strings) from a Path.
fn extract_path_names(path: &Path) -> Result<(&str, &str)> {

@@ -30,33 +31,62 @@ fn is_excluded(file_path: &str, file_basename: &str, exclude: &globset::GlobSet)
}

fn is_included(path: &Path) -> bool {
-    let file_name = path.to_string_lossy();
-    file_name.ends_with(".py") || file_name.ends_with(".pyi")
+    path.extension()
+        .map_or(false, |ext| ext == "py" || ext == "pyi")
}

+/// Find all `pyproject.toml` files for a given `Path`. Both parents and
+/// children will be included in the resulting `Vec`.
+pub fn iter_pyproject_files(path: &Path) -> Vec<PathBuf> {
+    let mut paths = Vec::new();
+
+    // Search for `pyproject.toml` files in all parent directories.
+    let path = normalize_path(path);
+    for path in path.ancestors() {
+        if path.is_dir() {
+            let toml_path = path.join("pyproject.toml");
+            if toml_path.exists() {
+                paths.push(toml_path);
+            }
+        }
+    }
+
+    // Search for `pyproject.toml` files in all child directories.
+    for path in WalkDir::new(path)
+        .into_iter()
+        .filter_entry(|entry| {
+            entry.file_name().to_str().map_or(false, |file_name| {
+                entry.depth() == 0 || !file_name.starts_with('.')
+            })
+        })
+        .filter_map(std::result::Result::ok)
+        .filter(|entry| entry.path().ends_with("pyproject.toml"))
+    {
+        paths.push(path.into_path());
+    }
+
+    paths
+}
+
/// Find all Python (`.py` and `.pyi` files) in a given `Path`.
pub fn iter_python_files<'a>(
    path: &'a Path,
-    exclude: &'a globset::GlobSet,
-    extend_exclude: &'a globset::GlobSet,
+    resolver: &'a Resolver<'a>,
) -> impl Iterator<Item = Result<DirEntry, walkdir::Error>> + 'a {
-    // Run some checks over the provided patterns, to enable optimizations below.
-    let has_exclude = !exclude.is_empty();
-    let has_extend_exclude = !extend_exclude.is_empty();
-
    WalkDir::new(normalize_path(path))
        .into_iter()
        .filter_entry(move |entry| {
-            if !has_exclude && !has_extend_exclude {
-                return true;
-            }
-
            let path = entry.path();
+            let settings = resolver.resolve(path);
+            let exclude = &settings.exclude;
+            let extend_exclude = &settings.extend_exclude;

            match extract_path_names(path) {
                Ok((file_path, file_basename)) => {
-                    if has_exclude && is_excluded(file_path, file_basename, exclude) {
+                    if !exclude.is_empty() && is_excluded(file_path, file_basename, exclude) {
                        debug!("Ignored path via `exclude`: {:?}", path);
                        false
-                    } else if has_extend_exclude
+                    } else if !extend_exclude.is_empty()
                        && is_excluded(file_path, file_basename, extend_exclude)
                    {
                        debug!("Ignored path via `extend-exclude`: {:?}", path);

@@ -131,7 +161,7 @@ pub(crate) fn read_file(path: &Path) -> Result<String> {

#[cfg(test)]
mod tests {
-    use std::path::{Path, PathBuf};
+    use std::path::Path;

    use anyhow::Result;
    use globset::GlobSet;

@@ -155,7 +185,7 @@ mod tests {
        assert!(!is_included(&path));
    }

-    fn make_exclusion(file_pattern: FilePattern, project_root: Option<&PathBuf>) -> GlobSet {
+    fn make_exclusion(file_pattern: FilePattern, project_root: Option<&Path>) -> GlobSet {
        let mut builder = globset::GlobSetBuilder::new();
        file_pattern.add_to(&mut builder, project_root).unwrap();
        builder.build().unwrap()

@@ -171,7 +201,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar").absolutize_from(project_root).unwrap();

@@ -180,7 +210,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar/baz.py")

@@ -191,7 +221,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar").absolutize_from(project_root).unwrap();

@@ -200,7 +230,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar/baz.py")

@@ -211,7 +241,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar/baz.py")

@@ -222,7 +252,7 @@ mod tests {
        assert!(is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        let path = Path::new("foo/bar/baz.py")

@@ -233,7 +263,7 @@ mod tests {
        assert!(!is_excluded(
            file_path,
            file_basename,
-            &make_exclusion(exclude, Some(&project_root.to_path_buf()))
+            &make_exclusion(exclude, Some(project_root))
        ));

        Ok(())
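For reference, the parent-directory half of iter_pyproject_files can be exercised on its own. A simplified sketch using only the standard library (the real function also normalizes the starting path and walks child directories with WalkDir, skipping dot-directories):

use std::path::{Path, PathBuf};

fn ancestor_pyprojects(start: &Path) -> Vec<PathBuf> {
    let mut found = Vec::new();
    // Walk from `start` up to the filesystem root, collecting any pyproject.toml.
    for dir in start.ancestors().filter(|p| p.is_dir()) {
        let candidate = dir.join("pyproject.toml");
        if candidate.exists() {
            found.push(candidate);
        }
    }
    found
}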
src/iterators.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
+#[cfg(not(target_family = "wasm"))]
+use rayon::prelude::*;
+
+/// Shim that calls `par_iter` except for wasm because there's no wasm support
+/// in rayon yet (there is a shim to be used for the web, but it requires js
+/// cooperation) Unfortunately, `ParallelIterator` does not implement `Iterator`
+/// so the signatures diverge
+#[cfg(not(target_family = "wasm"))]
+pub fn par_iter<T: Sync>(iterable: &[T]) -> impl ParallelIterator<Item = &T> {
+    iterable.par_iter()
+}
+
+#[cfg(target_family = "wasm")]
+pub fn par_iter<T: Sync>(iterable: &[T]) -> impl Iterator<Item = &T> {
+    iterable.iter()
+}
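Because methods such as map and sum exist on both Iterator and rayon's ParallelIterator, a call site written against the shim stays target-agnostic. A hedged usage sketch, assuming the crate is consumed as a library (as main.rs does after this commit):

#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*; // brings ParallelIterator's methods into scope on native targets
use ruff::iterators::par_iter;

// Sums path lengths in parallel on native targets and serially on wasm.
fn total_len(paths: &[String]) -> usize {
    par_iter(paths).map(String::len).sum()
}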
src/lib.rs

@@ -58,6 +58,7 @@ pub mod flake8_tidy_imports
mod flake8_unused_arguments;
pub mod fs;
mod isort;
+pub mod iterators;
mod lex;
pub mod linter;
pub mod logging;

@@ -73,6 +74,7 @@ mod pygrep_hooks;
mod pylint;
mod python;
mod pyupgrade;
+pub mod resolver;
mod ruff;
mod rustpython_helpers;
pub mod settings;

@@ -97,8 +99,8 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
    };

    let settings = Settings::from_configuration(
-        Configuration::from_pyproject(pyproject.as_ref(), project_root.as_ref())?,
-        project_root.as_ref(),
+        Configuration::from_pyproject(pyproject.as_ref())?,
+        project_root.as_deref(),
    )?;

    // Tokenize once.
src/main.rs (128 lines changed)

@@ -19,12 +19,14 @@ use std::time::Instant;
use ::ruff::autofix::fixer;
use ::ruff::checks::{CheckCode, CheckKind};
-use ::ruff::cli::{extract_log_level, Cli};
+use ::ruff::cli::{extract_log_level, Cli, Overrides};
use ::ruff::fs::iter_python_files;
+use ::ruff::iterators::par_iter;
use ::ruff::linter::{add_noqa_to_path, autoformat_path, lint_path, lint_stdin, Diagnostics};
use ::ruff::logging::{set_up_logging, LogLevel};
use ::ruff::message::Message;
use ::ruff::printer::Printer;
+use ::ruff::resolver::Resolver;
use ::ruff::settings::configuration::Configuration;
use ::ruff::settings::types::SerializationFormat;
use ::ruff::settings::{pyproject, Settings};

@@ -38,23 +40,10 @@ use log::{debug, error};
use notify::{recommended_watcher, RecursiveMode, Watcher};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
+use ruff::resolver::discover_settings;
use rustpython_ast::Location;
use walkdir::DirEntry;

-/// Shim that calls `par_iter` except for wasm because there's no wasm support
-/// in rayon yet (there is a shim to be used for the web, but it requires js
-/// cooperation) Unfortunately, `ParallelIterator` does not implement `Iterator`
-/// so the signatures diverge
-#[cfg(not(target_family = "wasm"))]
-fn par_iter<T: Sync>(iterable: &Vec<T>) -> impl ParallelIterator<Item = &T> {
-    iterable.par_iter()
-}
-
-#[cfg(target_family = "wasm")]
-fn par_iter<T: Sync>(iterable: &Vec<T>) -> impl Iterator<Item = &T> {
-    iterable.iter()
-}
-
fn read_from_stdin() -> Result<String> {
    let mut buffer = String::new();
    io::stdin().lock().read_to_string(&mut buffer)?;

@@ -74,15 +63,23 @@ fn run_once_stdin(

fn run_once(
    files: &[PathBuf],
-    settings: &Settings,
+    default: &Settings,
+    overrides: &Overrides,
    cache: bool,
    autofix: &fixer::Mode,
) -> Diagnostics {
+    // Discover the settings for the filesystem hierarchy.
+    let settings = discover_settings(files, overrides);
+    let resolver = Resolver {
+        default,
+        settings: &settings,
+    };
+
    // Collect all the files to check.
    let start = Instant::now();
    let paths: Vec<Result<DirEntry, walkdir::Error>> = files
        .iter()
-        .flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
+        .flat_map(|path| iter_python_files(path, &resolver))
        .collect();
    let duration = start.elapsed();
    debug!("Identified files to lint in: {:?}", duration);

@@ -93,6 +90,7 @@ fn run_once(
        match entry {
            Ok(entry) => {
                let path = entry.path();
+                let settings = resolver.resolve(path);
                lint_path(path, settings, &cache.into(), autofix)
                    .map_err(|e| (Some(path.to_owned()), e.to_string()))
            }

@@ -104,6 +102,7 @@ fn run_once(
            }
            .unwrap_or_else(|(path, message)| {
                if let Some(path) = path {
+                    let settings = resolver.resolve(&path);
                    if settings.enabled.contains(&CheckCode::E902) {
                        Diagnostics::new(vec![Message {
                            kind: CheckKind::IOError(message),

@@ -135,12 +134,19 @@ fn run_once(
    diagnostics
}

-fn add_noqa(files: &[PathBuf], settings: &Settings) -> usize {
+fn add_noqa(files: &[PathBuf], default: &Settings, overrides: &Overrides) -> usize {
+    // Discover the settings for the filesystem hierarchy.
+    let settings = discover_settings(files, overrides);
+    let resolver = Resolver {
+        default,
+        settings: &settings,
+    };
+
    // Collect all the files to check.
    let start = Instant::now();
    let paths: Vec<DirEntry> = files
        .iter()
-        .flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
+        .flat_map(|path| iter_python_files(path, &resolver))
        .flatten()
        .collect();
    let duration = start.elapsed();

@@ -150,6 +156,7 @@ fn add_noqa(files: &[PathBuf], settings: &Settings) -> usize {
    let modifications: usize = par_iter(&paths)
        .filter_map(|entry| {
            let path = entry.path();
+            let settings = resolver.resolve(path);
            match add_noqa_to_path(path, settings) {
                Ok(count) => Some(count),
                Err(e) => {

@@ -166,12 +173,19 @@ fn add_noqa(files: &[PathBuf], settings: &Settings) -> usize {
    modifications
}

-fn autoformat(files: &[PathBuf], settings: &Settings) -> usize {
+fn autoformat(files: &[PathBuf], default: &Settings, overrides: &Overrides) -> usize {
+    // Discover the settings for the filesystem hierarchy.
+    let settings = discover_settings(files, overrides);
+    let resolver = Resolver {
+        default,
+        settings: &settings,
+    };
+
    // Collect all the files to format.
    let start = Instant::now();
    let paths: Vec<DirEntry> = files
        .iter()
-        .flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
+        .flat_map(|path| iter_python_files(path, &resolver))
        .flatten()
        .collect();
    let duration = start.elapsed();

@@ -204,29 +218,45 @@ fn inner_main() -> Result<ExitCode> {
    set_up_logging(&log_level)?;

    if let Some(shell) = cli.generate_shell_completion {
-        shell.generate(&mut Cli::command(), &mut std::io::stdout());
+        shell.generate(&mut Cli::command(), &mut io::stdout());
        return Ok(ExitCode::SUCCESS);
    }

    // Find the project root and pyproject.toml.
-    let config: Option<PathBuf> = cli.config;
-    let project_root = config.as_ref().map_or_else(
+    // TODO(charlie): look in the current directory, but respect `--config`.
+    let project_root = cli.config.as_ref().map_or_else(
        || pyproject::find_project_root(&cli.files),
        |config| config.parent().map(fs::normalize_path),
    );
-    let pyproject = config.or_else(|| pyproject::find_pyproject_toml(project_root.as_ref()));
+    let pyproject = cli
+        .config
+        .or_else(|| pyproject::find_pyproject_toml(project_root.as_ref()));
+    match &project_root {
+        Some(path) => debug!("Found project root at: {:?}", path),
+        None => debug!("Unable to identify project root; assuming current directory..."),
+    };
+    match &pyproject {
+        Some(path) => debug!("Found pyproject.toml at: {:?}", path),
+        None => debug!("Unable to find pyproject.toml; using default settings..."),
+    };

    // Reconcile configuration from pyproject.toml and command-line arguments.
-    let mut configuration =
-        Configuration::from_pyproject(pyproject.as_ref(), project_root.as_ref())?;
-    configuration.merge(overrides);
+    let mut configuration = Configuration::from_pyproject(pyproject.as_ref())?;
+    configuration.merge(&overrides);

    if cli.show_settings && cli.show_files {
        eprintln!("Error: specify --show-settings or show-files (not both).");
        return Ok(ExitCode::FAILURE);
    }

    if cli.show_settings {
-        commands::show_settings(&configuration, project_root.as_ref(), pyproject.as_ref());
+        // TODO(charlie): This would be more useful if required a single file, and told
+        // you the settings used to lint that file.
+        commands::show_settings(
+            &configuration,
+            project_root.as_deref(),
+            pyproject.as_deref(),
+        );
        return Ok(ExitCode::SUCCESS);
    }

@@ -240,18 +270,7 @@ fn inner_main() -> Result<ExitCode> {
    };
    let format = configuration.format;

-    let settings = Settings::from_configuration(configuration, project_root.as_ref())?;
-
-    // Now that we've inferred the appropriate log level, add some debug
-    // information.
-    match &project_root {
-        Some(path) => debug!("Found project root at: {:?}", path),
-        None => debug!("Unable to identify project root; assuming current directory..."),
-    };
-    match &pyproject {
-        Some(path) => debug!("Found pyproject.toml at: {:?}", path),
-        None => debug!("Unable to find pyproject.toml; using default settings..."),
-    };
+    let settings = Settings::from_configuration(configuration, project_root.as_deref())?;

    if let Some(code) = cli.explain {
        commands::explain(&code, format)?;

@@ -259,7 +278,7 @@ fn inner_main() -> Result<ExitCode> {
    }

    if cli.show_files {
-        commands::show_files(&cli.files, &settings);
+        commands::show_files(&cli.files, &settings, &overrides);
        return Ok(ExitCode::SUCCESS);
    }

@@ -289,7 +308,13 @@ fn inner_main() -> Result<ExitCode> {
            printer.clear_screen()?;
            printer.write_to_user("Starting linter in watch mode...\n");

-            let messages = run_once(&cli.files, &settings, cache_enabled, &fixer::Mode::None);
+            let messages = run_once(
+                &cli.files,
+                &settings,
+                &overrides,
+                cache_enabled,
+                &fixer::Mode::None,
+            );
            printer.write_continuously(&messages)?;

            // Configure the file watcher.

@@ -305,15 +330,20 @@ fn inner_main() -> Result<ExitCode> {
                    let paths = e?.paths;
                    let py_changed = paths.iter().any(|p| {
                        p.extension()
-                            .map(|ext| ext.eq_ignore_ascii_case("py"))
+                            .map(|ext| ext == "py" || ext == "pyi")
                            .unwrap_or_default()
                    });
                    if py_changed {
                        printer.clear_screen()?;
                        printer.write_to_user("File change detected...\n");

-                        let messages =
-                            run_once(&cli.files, &settings, cache_enabled, &fixer::Mode::None);
+                        let messages = run_once(
+                            &cli.files,
+                            &settings,
+                            &overrides,
+                            cache_enabled,
+                            &fixer::Mode::None,
+                        );
                        printer.write_continuously(&messages)?;
                    }
                }

@@ -321,12 +351,12 @@ fn inner_main() -> Result<ExitCode> {
            }
        }
    } else if cli.add_noqa {
-        let modifications = add_noqa(&cli.files, &settings);
+        let modifications = add_noqa(&cli.files, &settings, &overrides);
        if modifications > 0 && log_level >= LogLevel::Default {
            println!("Added {modifications} noqa directives.");
        }
    } else if cli.autoformat {
-        let modifications = autoformat(&cli.files, &settings);
+        let modifications = autoformat(&cli.files, &settings, &overrides);
        if modifications > 0 && log_level >= LogLevel::Default {
            println!("Formatted {modifications} files.");
        }

@@ -339,7 +369,7 @@ fn inner_main() -> Result<ExitCode> {
            let path = Path::new(&filename);
            run_once_stdin(&settings, path, &fix)?
        } else {
-            run_once(&cli.files, &settings, cache_enabled, &fix)
+            run_once(&cli.files, &settings, &overrides, cache_enabled, &fix)
        };

        // Always try to print violations (the printer itself may suppress output),
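One small behavioral change rides along in the watch loop: a change to a .pyi file now also triggers a re-run, and the py comparison is exact rather than ASCII-case-insensitive. The predicate, pulled out for illustration:

use std::path::Path;

// Mirrors the watch-mode check: re-lint when a changed path is a .py or .pyi file.
fn is_python_change(path: &Path) -> bool {
    path.extension()
        .map(|ext| ext == "py" || ext == "pyi")
        .unwrap_or_default()
}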
src/resolver.rs (new file, 62 lines)

@@ -0,0 +1,62 @@
+//! Discover and resolve `Settings` from the filesystem hierarchy.
+
+use std::cmp::Reverse;
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Result};
+use log::error;
+
+use crate::cli::Overrides;
+use crate::fs::iter_pyproject_files;
+use crate::settings::configuration::Configuration;
+use crate::settings::{pyproject, Settings};
+
+pub struct Resolver<'a> {
+    pub settings: &'a [(PathBuf, Settings)],
+    pub default: &'a Settings,
+}
+
+impl<'a> Resolver<'a> {
+    pub fn resolve(&'a self, path: &Path) -> &'a Settings {
+        self.settings
+            .iter()
+            .find(|(root, _)| path.starts_with(root))
+            .map_or(self.default, |(_, settings)| settings)
+    }
+}
+
+/// Extract the `Settings` from a given `pyproject.toml`.
+pub fn settings_for_path(pyproject: &Path, overrides: &Overrides) -> Result<(PathBuf, Settings)> {
+    let project_root = pyproject
+        .parent()
+        .ok_or_else(|| anyhow!("Expected pyproject.toml to be in a directory"))?
+        .to_path_buf();
+    let options = pyproject::load_options(pyproject)?;
+    let mut configuration = Configuration::from_options(options)?;
+    configuration.merge(overrides);
+    let settings = Settings::from_configuration(configuration, Some(&project_root))?;
+    Ok((project_root, settings))
+}
+
+/// Discover all `Settings` objects within the relevant filesystem hierarchy.
+pub fn discover_settings(files: &[PathBuf], overrides: &Overrides) -> Vec<(PathBuf, Settings)> {
+    // Collect all `pyproject.toml` files.
+    let mut pyprojects: Vec<PathBuf> = files
+        .iter()
+        .flat_map(|path| iter_pyproject_files(path))
+        .collect();
+    pyprojects.sort_unstable_by_key(|path| Reverse(path.to_string_lossy().len()));
+    pyprojects.dedup();
+
+    // Read every `pyproject.toml`.
+    pyprojects
+        .into_iter()
+        .filter_map(|pyproject| match settings_for_path(&pyproject, overrides) {
+            Ok((project_root, settings)) => Some((project_root, settings)),
+            Err(error) => {
+                error!("Failed to read settings: {error}");
+                None
+            }
+        })
+        .collect::<Vec<_>>()
+}
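Note the ordering in discover_settings: candidate roots are sorted by descending path length (a proxy for depth) before Resolver::resolve does a linear find, so the deepest pyproject.toml wins for files under nested projects. A small self-contained sketch of just that ordering:

use std::cmp::Reverse;
use std::path::PathBuf;

fn main() {
    let mut roots = vec![
        PathBuf::from("repo"),
        PathBuf::from("repo/examples/docs"),
        PathBuf::from("repo/examples"),
    ];
    // Longest paths first, mirroring discover_settings: a later `find` on this
    // list returns the most specific root that contains a given file.
    roots.sort_unstable_by_key(|path| Reverse(path.to_string_lossy().len()));
    assert_eq!(
        roots,
        vec![
            PathBuf::from("repo/examples/docs"),
            PathBuf::from("repo/examples"),
            PathBuf::from("repo"),
        ]
    );
}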
src/settings/configuration.rs

@@ -5,18 +5,19 @@
use std::path::{Path, PathBuf};

use anyhow::{anyhow, Result};
use log::debug;
use once_cell::sync::Lazy;
use path_absolutize::path_dedot;
use regex::Regex;
use rustc_hash::FxHashSet;

use crate::checks_gen::{CheckCodePrefix, CATEGORIES};
use crate::cli::{collect_per_file_ignores, Overrides};
use crate::settings::options::Options;
use crate::settings::pyproject::load_options;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion, SerializationFormat};
use crate::{
    flake8_annotations, flake8_bugbear, flake8_import_conventions, flake8_quotes,
-    flake8_tidy_imports, fs, isort, mccabe, pep8_naming, pyupgrade,
+    flake8_tidy_imports, isort, mccabe, pep8_naming, pyupgrade,
};

#[derive(Debug)]

@@ -80,11 +81,18 @@ static DEFAULT_DUMMY_VARIABLE_RGX: Lazy<Regex> =
    Lazy::new(|| Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap());

impl Configuration {
-    pub fn from_pyproject(
-        pyproject: Option<&PathBuf>,
-        project_root: Option<&PathBuf>,
-    ) -> Result<Self> {
-        let options = load_options(pyproject)?;
+    pub fn from_pyproject(pyproject: Option<&PathBuf>) -> Result<Self> {
+        Self::from_options(pyproject.map_or_else(
+            || {
+                debug!("No pyproject.toml found.");
+                debug!("Falling back to default configuration...");
+                Ok(Options::default())
+            },
+            |path| load_options(path),
+        )?)
+    }
+
+    pub fn from_options(options: Options) -> Result<Self> {
        Ok(Configuration {
            allowed_confusables: FxHashSet::from_iter(
                options.allowed_confusables.unwrap_or_default(),

@@ -95,21 +103,10 @@ impl Configuration {
                None => DEFAULT_DUMMY_VARIABLE_RGX.clone(),
            },
            src: options.src.map_or_else(
-                || {
-                    vec![match project_root {
-                        Some(project_root) => project_root.clone(),
-                        None => path_dedot::CWD.clone(),
-                    }]
-                },
+                || vec![Path::new(".").to_path_buf()],
                |src| {
                    src.iter()
-                        .map(|path| {
-                            let path = Path::new(path);
-                            match project_root {
-                                Some(project_root) => fs::normalize_path_to(path, project_root),
-                                None => fs::normalize_path(path),
-                            }
-                        })
+                        .map(|path| Path::new(path).to_path_buf())
                        .collect()
                },
            ),

@@ -187,54 +184,54 @@ impl Configuration {
        })
    }

-    pub fn merge(&mut self, overrides: Overrides) {
-        if let Some(dummy_variable_rgx) = overrides.dummy_variable_rgx {
-            self.dummy_variable_rgx = dummy_variable_rgx;
+    pub fn merge(&mut self, overrides: &Overrides) {
+        if let Some(dummy_variable_rgx) = &overrides.dummy_variable_rgx {
+            self.dummy_variable_rgx = dummy_variable_rgx.clone();
        }
-        if let Some(exclude) = overrides.exclude {
-            self.exclude = exclude;
+        if let Some(exclude) = &overrides.exclude {
+            self.exclude = exclude.clone();
        }
-        if let Some(extend_exclude) = overrides.extend_exclude {
-            self.extend_exclude = extend_exclude;
+        if let Some(extend_exclude) = &overrides.extend_exclude {
+            self.extend_exclude = extend_exclude.clone();
        }
-        if let Some(extend_ignore) = overrides.extend_ignore {
-            self.extend_ignore = extend_ignore;
+        if let Some(extend_ignore) = &overrides.extend_ignore {
+            self.extend_ignore = extend_ignore.clone();
        }
-        if let Some(extend_select) = overrides.extend_select {
-            self.extend_select = extend_select;
+        if let Some(extend_select) = &overrides.extend_select {
+            self.extend_select = extend_select.clone();
        }
-        if let Some(fix) = overrides.fix {
-            self.fix = fix;
+        if let Some(fix) = &overrides.fix {
+            self.fix = *fix;
        }
-        if let Some(fixable) = overrides.fixable {
-            self.fixable = fixable;
+        if let Some(fixable) = &overrides.fixable {
+            self.fixable = fixable.clone();
        }
-        if let Some(format) = overrides.format {
-            self.format = format;
+        if let Some(format) = &overrides.format {
+            self.format = *format;
        }
-        if let Some(ignore) = overrides.ignore {
-            self.ignore = ignore;
+        if let Some(ignore) = &overrides.ignore {
+            self.ignore = ignore.clone();
        }
-        if let Some(line_length) = overrides.line_length {
-            self.line_length = line_length;
+        if let Some(line_length) = &overrides.line_length {
+            self.line_length = *line_length;
        }
-        if let Some(max_complexity) = overrides.max_complexity {
-            self.mccabe.max_complexity = max_complexity;
+        if let Some(max_complexity) = &overrides.max_complexity {
+            self.mccabe.max_complexity = *max_complexity;
        }
-        if let Some(per_file_ignores) = overrides.per_file_ignores {
-            self.per_file_ignores = collect_per_file_ignores(per_file_ignores);
+        if let Some(per_file_ignores) = &overrides.per_file_ignores {
+            self.per_file_ignores = collect_per_file_ignores(per_file_ignores.clone());
        }
-        if let Some(select) = overrides.select {
-            self.select = select;
+        if let Some(select) = &overrides.select {
+            self.select = select.clone();
        }
-        if let Some(show_source) = overrides.show_source {
-            self.show_source = show_source;
+        if let Some(show_source) = &overrides.show_source {
+            self.show_source = *show_source;
        }
-        if let Some(target_version) = overrides.target_version {
-            self.target_version = target_version;
+        if let Some(target_version) = &overrides.target_version {
+            self.target_version = *target_version;
        }
-        if let Some(unfixable) = overrides.unfixable {
-            self.unfixable = unfixable;
+        if let Some(unfixable) = &overrides.unfixable {
+            self.unfixable = unfixable.clone();
        }
    }
}
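Configuration::merge now borrows the Overrides so the same CLI flags can be re-applied to every discovered pyproject.toml instead of being consumed by the first merge. The precedence itself is unchanged: a flag that was passed wins, an absent flag leaves the file-based value alone. A reduced model of that behavior, cut down to a single line_length field:

#[derive(Debug, PartialEq)]
struct Config {
    line_length: usize,
}

struct Overrides {
    line_length: Option<usize>,
}

impl Config {
    // Mirrors Configuration::merge: only Some(..) values override the file's value.
    fn merge(&mut self, overrides: &Overrides) {
        if let Some(line_length) = &overrides.line_length {
            self.line_length = *line_length;
        }
    }
}

fn main() {
    let mut config = Config { line_length: 88 }; // from pyproject.toml
    config.merge(&Overrides { line_length: None }); // flag not passed
    assert_eq!(config.line_length, 88);
    config.merge(&Overrides { line_length: Some(120) }); // e.g. --line-length 120
    assert_eq!(config.line_length, 120);
}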
src/settings/mod.rs

@@ -55,10 +55,7 @@ pub struct Settings {
}

impl Settings {
-    pub fn from_configuration(
-        config: Configuration,
-        project_root: Option<&PathBuf>,
-    ) -> Result<Self> {
+    pub fn from_configuration(config: Configuration, project_root: Option<&Path>) -> Result<Self> {
        Ok(Self {
            allowed_confusables: config.allowed_confusables,
            dummy_variable_rgx: config.dummy_variable_rgx,

@@ -90,7 +87,7 @@ impl Settings {
            pep8_naming: config.pep8_naming,
            pyupgrade: config.pyupgrade,
            per_file_ignores: resolve_per_file_ignores(config.per_file_ignores, project_root)?,
-            src: config.src,
+            src: resolve_src(config.src, project_root),
            target_version: config.target_version,
            show_source: config.show_source,
        })

@@ -192,10 +189,7 @@ impl Hash for Settings {
}

/// Given a list of patterns, create a `GlobSet`.
-pub fn resolve_globset(
-    patterns: Vec<FilePattern>,
-    project_root: Option<&PathBuf>,
-) -> Result<GlobSet> {
+pub fn resolve_globset(patterns: Vec<FilePattern>, project_root: Option<&Path>) -> Result<GlobSet> {
    let mut builder = globset::GlobSetBuilder::new();
    for pattern in patterns {
        pattern.add_to(&mut builder, project_root)?;

@@ -206,7 +200,7 @@ pub fn resolve_globset(
/// Given a list of patterns, create a `GlobSet`.
pub fn resolve_per_file_ignores(
    per_file_ignores: Vec<PerFileIgnore>,
-    project_root: Option<&PathBuf>,
+    project_root: Option<&Path>,
) -> Result<Vec<(GlobMatcher, GlobMatcher, FxHashSet<CheckCode>)>> {
    per_file_ignores
        .into_iter()

@@ -227,6 +221,15 @@ pub fn resolve_per_file_ignores(
        .collect()
}

+pub fn resolve_src(src: Vec<PathBuf>, project_root: Option<&Path>) -> Vec<PathBuf> {
+    src.into_iter()
+        .map(|path| match project_root {
+            Some(project_root) => fs::normalize_path_to(&path, project_root),
+            None => fs::normalize_path(&path),
+        })
+        .collect()
+}
+
/// Given a set of selected and ignored prefixes, resolve the set of enabled
/// error codes.
fn resolve_codes(select: &[CheckCodePrefix], ignore: &[CheckCodePrefix]) -> FxHashSet<CheckCode> {
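resolve_src is what makes a nested project's src entries meaningful: relative entries such as src = ["docs_snippets"] are anchored to the pyproject.toml that declared them rather than to wherever ruff was invoked. Conceptually it behaves like the sketch below (the real code routes through fs::normalize_path_to and fs::normalize_path rather than a plain join):

use std::path::{Path, PathBuf};

fn resolve_src(src: Vec<PathBuf>, project_root: Option<&Path>) -> Vec<PathBuf> {
    src.into_iter()
        .map(|path| match project_root {
            // Anchor relative entries to the declaring project's root...
            Some(root) => root.join(path),
            // ...or leave them untouched when no root is known.
            None => path,
        })
        .collect()
}

fn main() {
    let resolved = resolve_src(
        vec![PathBuf::from("docs_snippets")],
        Some(Path::new("test_project/examples/docs_snippets")),
    );
    assert_eq!(
        resolved,
        vec![PathBuf::from("test_project/examples/docs_snippets/docs_snippets")]
    );
}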
src/settings/pyproject.rs

@@ -4,7 +4,6 @@ use std::path::{Path, PathBuf};

use anyhow::{anyhow, Result};
use common_path::common_path_all;
-use log::debug;
use path_absolutize::Absolutize;
use serde::{Deserialize, Serialize};

@@ -80,18 +79,12 @@ pub fn find_project_root(sources: &[PathBuf]) -> Option<PathBuf> {
    None
}

-pub fn load_options(pyproject: Option<&PathBuf>) -> Result<Options> {
-    if let Some(pyproject) = pyproject {
+pub fn load_options(pyproject: &Path) -> Result<Options> {
    Ok(parse_pyproject_toml(pyproject)
        .map_err(|err| anyhow!("Failed to parse `{}`: {}", pyproject.to_string_lossy(), err))?
        .tool
        .and_then(|tool| tool.ruff)
        .unwrap_or_default())
-    } else {
-        debug!("No pyproject.toml found.");
-        debug!("Falling back to default configuration...");
-        Ok(Options::default())
-    }
}

#[cfg(test)]
src/settings/types.rs

@@ -1,6 +1,6 @@
use std::env;
use std::hash::Hash;
-use std::path::{Path, PathBuf};
+use std::path::Path;
use std::str::FromStr;

use anyhow::{anyhow, bail, Result};

@@ -53,11 +53,7 @@ pub enum FilePattern {
}

impl FilePattern {
-    pub fn add_to(
-        self,
-        builder: &mut GlobSetBuilder,
-        project_root: Option<&PathBuf>,
-    ) -> Result<()> {
+    pub fn add_to(self, builder: &mut GlobSetBuilder, project_root: Option<&Path>) -> Result<()> {
        match self {
            FilePattern::Builtin(pattern) => {
                builder.add(Glob::from_str(pattern)?);
test_project/examples/docs_snippets/docs_snippets/celery_job.py (new executable file, 5 lines)

@@ -0,0 +1,5 @@
+import os
+
+
+def f():
+    x = 1

@@ -0,0 +1,5 @@
+import os
+
+
+def f():
+    x = 1

test_project/examples/docs_snippets/pyproject.toml (new file, 4 lines)

@@ -0,0 +1,4 @@
+[tool.ruff]
+src = ["docs_snippets"]
+extend-ignore = ["F401"]
+extend-exclude = ["./docs_snippets/concepts/ignore.py"]

test_project/pyproject.toml (new file, 2 lines)

@@ -0,0 +1,2 @@
+[tool.ruff]
+src = ["src"]

test_project/src/__init__.py (new empty file)

test_project/src/core.py (new file, 5 lines)

@@ -0,0 +1,5 @@
+import os
+
+
+def f():
+    x = 1
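The new test_project fixture exercises exactly the nesting this commit enables: files under test_project/src are governed by the top-level pyproject.toml, while everything under test_project/examples/docs_snippets picks up the nested file (its own src, extend-ignore = ["F401"], and extend-exclude). A small sketch of the expected root-for-file mapping, assuming nearest-root-wins as implemented in Resolver::resolve:

use std::path::Path;

// Roots listed deepest-first, as discover_settings orders them.
fn governing_root<'a>(roots: &[&'a str], file: &str) -> Option<&'a str> {
    roots
        .iter()
        .copied()
        .find(|root| Path::new(file).starts_with(*root))
}

fn main() {
    let roots = [
        "test_project/examples/docs_snippets",
        "test_project",
    ];
    assert_eq!(
        governing_root(&roots, "test_project/src/core.py"),
        Some("test_project")
    );
    assert_eq!(
        governing_root(&roots, "test_project/examples/docs_snippets/docs_snippets/celery_job.py"),
        Some("test_project/examples/docs_snippets")
    );
}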