Move Configuration to ruff_workspace crate (#6920)

Author: Micha Reiser, 2023-08-28 08:21:35 +02:00 (committed by GitHub)
Parent: 039694aaed
Commit: a6aa16630d
77 changed files with 3704 additions and 4108 deletions

Cargo.lock (generated)

@ -840,9 +840,14 @@ dependencies = [
"clap",
"colored",
"configparser",
"itertools",
"log",
"once_cell",
"pep440_rs",
"pretty_assertions",
"regex",
"ruff",
"ruff_workspace",
"rustc-hash",
"serde",
"serde_json",
@ -2096,11 +2101,9 @@ dependencies = [
"chrono",
"clap",
"colored",
"dirs 5.0.1",
"fern",
"glob",
"globset",
"ignore",
"imperative",
"insta",
"is-macro",
@ -2140,7 +2143,6 @@ dependencies = [
"serde",
"serde_json",
"serde_with",
"shellexpand",
"similar",
"smallvec",
"strum",
@ -2227,6 +2229,7 @@ dependencies = [
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"rustc-hash",
"serde",
"serde_json",
@ -2269,6 +2272,7 @@ dependencies = [
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_python_trivia",
"ruff_workspace",
"schemars",
"serde",
"serde_json",
@ -2535,12 +2539,40 @@ dependencies = [
"ruff_python_parser",
"ruff_source_file",
"ruff_text_size",
"ruff_workspace",
"serde",
"serde-wasm-bindgen",
"wasm-bindgen",
"wasm-bindgen-test",
]
[[package]]
name = "ruff_workspace"
version = "0.0.0"
dependencies = [
"anyhow",
"colored",
"dirs 5.0.1",
"glob",
"globset",
"ignore",
"itertools",
"log",
"path-absolutize",
"pep440_rs",
"regex",
"ruff",
"ruff_cache",
"ruff_macros",
"rustc-hash",
"schemars",
"serde",
"shellexpand",
"strum",
"tempfile",
"toml",
]
[[package]]
name = "rust-stemmers"
version = "1.2.0"
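
The lockfile changes above add a new `ruff_workspace` crate, and several existing crates (including the flake8-to-ruff converter and the WASM bindings) now depend on it. A minimal sketch of what the split looks like from a consumer's side, built only from paths that appear in the hunks below (`ruff_workspace::options`, `ruff_workspace::pyproject`, `ruff::settings::types`); treat the exact field set of `Options` as an assumption beyond what those hunks show:

use ruff::settings::types::PythonVersion;
use ruff_workspace::options::{Flake8QuotesOptions, Options};
use ruff_workspace::pyproject::Pyproject;

// Option structs and the pyproject wrapper now come from ruff_workspace,
// while the runtime value types they reference stay in ruff.
fn example_section() -> Pyproject {
    let options = Options {
        target_version: Some(PythonVersion::Py38),
        flake8_quotes: Some(Flake8QuotesOptions::default()),
        ..Options::default()
    };
    Pyproject::new(options)
}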


@ -14,12 +14,16 @@ license = { workspace = true }
[dependencies]
ruff = { path = "../ruff", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }
anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.2" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.3.1", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
@ -27,3 +31,6 @@ serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
pretty_assertions = "1.3.0"


@ -1,13 +1,12 @@
//! Extract Black configuration settings from a pyproject.toml.
use ruff::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
use crate::settings::types::PythonVersion;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct Black {
pub(crate) struct Black {
#[serde(alias = "line-length", alias = "line_length")]
pub line_length: Option<usize>,
pub(crate) line_length: Option<usize>,
#[serde(alias = "target-version", alias = "target_version")]
pub target_version: Option<Vec<PythonVersion>>,
pub(crate) target_version: Option<Vec<PythonVersion>>,
}


@ -1,25 +1,24 @@
use std::collections::{HashMap, HashSet};
use anyhow::Result;
use itertools::Itertools;
use crate::line_width::LineLength;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::rules::flake8_pytest_style::types::{
use ruff::line_width::LineLength;
use ruff::registry::Linter;
use ruff::rule_selector::RuleSelector;
use ruff::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::settings::Strictness;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{
flake8_annotations, flake8_bugbear, flake8_builtins, flake8_errmsg, flake8_pytest_style,
flake8_quotes, flake8_tidy_imports, mccabe, pep8_naming, pydocstyle,
use ruff::rules::flake8_quotes::settings::Quote;
use ruff::rules::flake8_tidy_imports::settings::Strictness;
use ruff::rules::pydocstyle::settings::Convention;
use ruff::settings::types::PythonVersion;
use ruff::warn_user;
use ruff_workspace::options::{
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, McCabeOptions,
Options, Pep8NamingOptions, PydocstyleOptions,
};
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
use crate::warn_user;
use ruff_workspace::pyproject::Pyproject;
use super::external_config::ExternalConfig;
use super::plugin::Plugin;
@ -30,11 +29,11 @@ const DEFAULT_SELECTORS: &[RuleSelector] = &[
RuleSelector::Linter(Linter::Pycodestyle),
];
pub fn convert(
pub(crate) fn convert(
config: &HashMap<String, HashMap<String, Option<String>>>,
external_config: &ExternalConfig,
plugins: Option<Vec<Plugin>>,
) -> Result<Pyproject> {
) -> Pyproject {
// Extract the Flake8 section.
let flake8 = config
.get("flake8")
@ -103,16 +102,16 @@ pub fn convert(
// Parse each supported option.
let mut options = Options::default();
let mut flake8_annotations = flake8_annotations::settings::Options::default();
let mut flake8_bugbear = flake8_bugbear::settings::Options::default();
let mut flake8_builtins = flake8_builtins::settings::Options::default();
let mut flake8_errmsg = flake8_errmsg::settings::Options::default();
let mut flake8_pytest_style = flake8_pytest_style::settings::Options::default();
let mut flake8_quotes = flake8_quotes::settings::Options::default();
let mut flake8_tidy_imports = flake8_tidy_imports::options::Options::default();
let mut mccabe = mccabe::settings::Options::default();
let mut pep8_naming = pep8_naming::settings::Options::default();
let mut pydocstyle = pydocstyle::settings::Options::default();
let mut flake8_annotations = Flake8AnnotationsOptions::default();
let mut flake8_bugbear = Flake8BugbearOptions::default();
let mut flake8_builtins = Flake8BuiltinsOptions::default();
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
let mut flake8_quotes = Flake8QuotesOptions::default();
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
let mut mccabe = McCabeOptions::default();
let mut pep8_naming = Pep8NamingOptions::default();
let mut pydocstyle = PydocstyleOptions::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
@ -372,34 +371,34 @@ pub fn convert(
.sorted_by_key(RuleSelector::prefix_and_code)
.collect(),
);
if flake8_annotations != flake8_annotations::settings::Options::default() {
if flake8_annotations != Flake8AnnotationsOptions::default() {
options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != flake8_bugbear::settings::Options::default() {
if flake8_bugbear != Flake8BugbearOptions::default() {
options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_builtins != flake8_builtins::settings::Options::default() {
if flake8_builtins != Flake8BuiltinsOptions::default() {
options.flake8_builtins = Some(flake8_builtins);
}
if flake8_errmsg != flake8_errmsg::settings::Options::default() {
if flake8_errmsg != Flake8ErrMsgOptions::default() {
options.flake8_errmsg = Some(flake8_errmsg);
}
if flake8_pytest_style != flake8_pytest_style::settings::Options::default() {
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
options.flake8_pytest_style = Some(flake8_pytest_style);
}
if flake8_quotes != flake8_quotes::settings::Options::default() {
if flake8_quotes != Flake8QuotesOptions::default() {
options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != flake8_tidy_imports::options::Options::default() {
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != mccabe::settings::Options::default() {
if mccabe != McCabeOptions::default() {
options.mccabe = Some(mccabe);
}
if pep8_naming != pep8_naming::settings::Options::default() {
if pep8_naming != Pep8NamingOptions::default() {
options.pep8_naming = Some(pep8_naming);
}
if pydocstyle != pydocstyle::settings::Options::default() {
if pydocstyle != PydocstyleOptions::default() {
options.pydocstyle = Some(pydocstyle);
}
@ -439,7 +438,7 @@ pub fn convert(
}
// Create the pyproject.toml.
Ok(Pyproject::new(options))
Pyproject::new(options)
}
/// Resolve the set of enabled `RuleSelector` values for the given
@ -458,19 +457,20 @@ mod tests {
use anyhow::Result;
use itertools::Itertools;
use pep440_rs::VersionSpecifiers;
use pretty_assertions::assert_eq;
use crate::flake8_to_ruff::converter::DEFAULT_SELECTORS;
use crate::flake8_to_ruff::pep621::Project;
use crate::flake8_to_ruff::ExternalConfig;
use crate::line_width::LineLength;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{flake8_quotes, pydocstyle};
use crate::settings::options::Options;
use crate::settings::pyproject::Pyproject;
use crate::settings::types::PythonVersion;
use pretty_assertions::assert_eq;
use ruff::line_width::LineLength;
use ruff::registry::Linter;
use ruff::rule_selector::RuleSelector;
use ruff::rules::flake8_quotes;
use ruff::rules::pydocstyle::settings::Convention;
use ruff::settings::types::PythonVersion;
use ruff_workspace::options::{Flake8QuotesOptions, Options, PydocstyleOptions};
use ruff_workspace::pyproject::Pyproject;
use crate::converter::DEFAULT_SELECTORS;
use crate::pep621::Project;
use crate::ExternalConfig;
use super::super::plugin::Plugin;
use super::convert;
@ -491,20 +491,18 @@ mod tests {
}
#[test]
fn it_converts_empty() -> Result<()> {
fn it_converts_empty() {
let actual = convert(
&HashMap::from([("flake8".to_string(), HashMap::default())]),
&ExternalConfig::default(),
None,
)?;
);
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_converts_dashes() -> Result<()> {
fn it_converts_dashes() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -512,18 +510,16 @@ mod tests {
)]),
&ExternalConfig::default(),
Some(vec![]),
)?;
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::from(100)),
..default_options([])
});
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_converts_underscores() -> Result<()> {
fn it_converts_underscores() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -531,18 +527,16 @@ mod tests {
)]),
&ExternalConfig::default(),
Some(vec![]),
)?;
);
let expected = Pyproject::new(Options {
line_length: Some(LineLength::from(100)),
..default_options([])
});
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_ignores_parse_errors() -> Result<()> {
fn it_ignores_parse_errors() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -550,15 +544,13 @@ mod tests {
)]),
&ExternalConfig::default(),
Some(vec![]),
)?;
);
let expected = Pyproject::new(default_options([]));
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_converts_plugin_options() -> Result<()> {
fn it_converts_plugin_options() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -566,9 +558,9 @@ mod tests {
)]),
&ExternalConfig::default(),
Some(vec![]),
)?;
);
let expected = Pyproject::new(Options {
flake8_quotes: Some(flake8_quotes::settings::Options {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
@ -577,12 +569,10 @@ mod tests {
..default_options([])
});
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_converts_docstring_conventions() -> Result<()> {
fn it_converts_docstring_conventions() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -593,9 +583,9 @@ mod tests {
)]),
&ExternalConfig::default(),
Some(vec![Plugin::Flake8Docstrings]),
)?;
);
let expected = Pyproject::new(Options {
pydocstyle: Some(pydocstyle::settings::Options {
pydocstyle: Some(PydocstyleOptions {
convention: Some(Convention::Numpy),
ignore_decorators: None,
property_decorators: None,
@ -603,12 +593,10 @@ mod tests {
..default_options([Linter::Pydocstyle.into()])
});
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn it_infers_plugins_if_omitted() -> Result<()> {
fn it_infers_plugins_if_omitted() {
let actual = convert(
&HashMap::from([(
"flake8".to_string(),
@ -616,9 +604,9 @@ mod tests {
)]),
&ExternalConfig::default(),
None,
)?;
);
let expected = Pyproject::new(Options {
flake8_quotes: Some(flake8_quotes::settings::Options {
flake8_quotes: Some(Flake8QuotesOptions {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
docstring_quotes: None,
@ -627,8 +615,6 @@ mod tests {
..default_options([Linter::Flake8Quotes.into()])
});
assert_eq!(actual, expected);
Ok(())
}
#[test]
@ -642,7 +628,7 @@ mod tests {
..ExternalConfig::default()
},
Some(vec![]),
)?;
);
let expected = Pyproject::new(Options {
target_version: Some(PythonVersion::Py38),
..default_options([])


@ -0,0 +1,10 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
pub(crate) black: Option<&'a Black>,
pub(crate) isort: Option<&'a Isort>,
pub(crate) project: Option<&'a Project>,
}


@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize};
/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct Isort {
pub(crate) struct Isort {
#[serde(alias = "src-paths", alias = "src_paths")]
pub src_paths: Option<Vec<String>>,
pub(crate) src_paths: Option<Vec<String>>,
}


@ -1,12 +1,24 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;
use ruff::flake8_to_ruff::{self, ExternalConfig};
use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff::logging::{set_up_logging, LogLevel};
#[derive(Parser)]
@ -25,7 +37,7 @@ struct Args {
pyproject: Option<PathBuf>,
/// List of plugins to enable.
#[arg(long, value_delimiter = ',')]
plugin: Option<Vec<flake8_to_ruff::Plugin>>,
plugin: Option<Vec<Plugin>>,
}
fn main() -> Result<()> {
@ -39,7 +51,7 @@ fn main() -> Result<()> {
let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;
// Read the pyproject.toml file.
let pyproject = args.pyproject.map(flake8_to_ruff::parse).transpose()?;
let pyproject = args.pyproject.map(parse).transpose()?;
let external_config = pyproject
.as_ref()
.and_then(|pyproject| pyproject.tool.as_ref())
@ -57,7 +69,7 @@ fn main() -> Result<()> {
};
// Create Ruff's pyproject.toml section.
let pyproject = flake8_to_ruff::convert(&config, &external_config, args.plugin)?;
let pyproject = convert(&config, &external_config, args.plugin);
#[allow(clippy::print_stdout)]
{
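
With `convert` now infallible, the remaining failure points in this binary are argument parsing, file I/O, and serialization. The body of the print block is cut off in this hunk; a hedged sketch of what it presumably amounts to, written as a standalone helper (the `toml::to_string_pretty` call and the `Serialize` bound on `Pyproject` are assumptions, not taken from this diff):

use anyhow::Result;
use ruff_workspace::pyproject::Pyproject;

// Render the generated `[tool.ruff]` section as TOML and print it to stdout.
fn print_pyproject(pyproject: &Pyproject) -> Result<()> {
    println!("{}", toml::to_string_pretty(pyproject)?);
    Ok(())
}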


@ -3,12 +3,10 @@ use std::str::FromStr;
use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use ruff::settings::types::PatternPrefixPair;
use ruff::{warn_user, RuleSelector};
use rustc_hash::FxHashMap;
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
use crate::warn_user;
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `RuleSelector` values (e.g.,
@ -194,11 +192,11 @@ pub(crate) fn collect_per_file_ignores(
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff::RuleSelector;
use crate::codes;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use crate::settings::types::PatternPrefixPair;
use ruff::codes;
use ruff::registry::Linter;
use ruff::settings::types::PatternPrefixPair;
use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};


@ -4,7 +4,7 @@ use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct Project {
pub(crate) struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
pub requires_python: Option<VersionSpecifiers>,
pub(crate) requires_python: Option<VersionSpecifiers>,
}


@ -3,9 +3,8 @@ use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use crate::registry::Linter;
use crate::rule_selector::RuleSelector;
use ruff::registry::Linter;
use ruff::RuleSelector;
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {


@ -8,18 +8,18 @@ use super::isort::Isort;
use super::pep621::Project;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Tools {
pub black: Option<Black>,
pub isort: Option<Isort>,
pub(crate) struct Tools {
pub(crate) black: Option<Black>,
pub(crate) isort: Option<Isort>,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Pyproject {
pub tool: Option<Tools>,
pub project: Option<Project>,
pub(crate) struct Pyproject {
pub(crate) tool: Option<Tools>,
pub(crate) project: Option<Project>,
}
pub fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
let contents = std::fs::read_to_string(path)?;
let pyproject = toml::from_str::<Pyproject>(&contents)?;
Ok(pyproject)


@ -36,11 +36,9 @@ bitflags = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true }
dirs = { version = "5.0.0" }
fern = { version = "0.6.1" }
glob = { workspace = true }
globset = { workspace = true }
ignore = { workspace = true }
imperative = { version = "1.0.4" }
is-macro = { workspace = true }
itertools = { workspace = true }
@ -68,7 +66,6 @@ serde = { workspace = true }
serde_json = { workspace = true }
serde_with = { version = "3.0.0" }
similar = { workspace = true }
shellexpand = { workspace = true }
smallvec = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }


@ -1,10 +0,0 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;
#[derive(Default)]
pub struct ExternalConfig<'a> {
pub black: Option<&'a Black>,
pub isort: Option<&'a Isort>,
pub project: Option<&'a Project>,
}


@ -1,13 +0,0 @@
pub use converter::convert;
pub use external_config::ExternalConfig;
pub use plugin::Plugin;
pub use pyproject::parse;
mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
pub mod pep621;
mod plugin;
mod pyproject;


@ -12,13 +12,12 @@ pub const VERSION: &str = env!("CARGO_PKG_VERSION");
mod autofix;
mod checkers;
mod codes;
pub mod codes;
mod comments;
mod cst;
pub mod directives;
mod doc_lines;
mod docstrings;
pub mod flake8_to_ruff;
pub mod fs;
mod importer;
pub mod jupyter;
@ -32,9 +31,8 @@ pub mod packaging;
pub mod pyproject_toml;
pub mod registry;
mod renamer;
pub mod resolver;
mod rule_redirects;
mod rule_selector;
pub mod rule_selector;
pub mod rules;
pub mod settings;
pub mod source_kind;
@ -42,3 +40,5 @@ pub mod upstream_categories;
#[cfg(any(test, fuzzing))]
pub mod test;
pub const RUFF_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");


@ -15,7 +15,7 @@ use crate::fs;
use crate::jupyter::Notebook;
use crate::source_kind::SourceKind;
pub(crate) static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
pub static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
/// Warn a user once, with uniqueness determined by the given ID.
#[macro_export]


@ -2,10 +2,6 @@
use std::path::{Path, PathBuf};
use rustc_hash::FxHashMap;
use crate::resolver::{PyprojectConfig, Resolver};
// If we have a Python package layout like:
// - root/
// - foo/
@ -51,68 +47,6 @@ pub fn detect_package_root<'a>(
current
}
/// A wrapper around `is_package` to cache filesystem lookups.
fn is_package_with_cache<'a>(
path: &'a Path,
namespace_packages: &'a [PathBuf],
package_cache: &mut FxHashMap<&'a Path, bool>,
) -> bool {
*package_cache
.entry(path)
.or_insert_with(|| is_package(path, namespace_packages))
}
/// A wrapper around `detect_package_root` to cache filesystem lookups.
fn detect_package_root_with_cache<'a>(
path: &'a Path,
namespace_packages: &'a [PathBuf],
package_cache: &mut FxHashMap<&'a Path, bool>,
) -> Option<&'a Path> {
let mut current = None;
for parent in path.ancestors() {
if !is_package_with_cache(parent, namespace_packages, package_cache) {
return current;
}
current = Some(parent);
}
current
}
/// Return a mapping from Python package to its package root.
pub fn detect_package_roots<'a>(
files: &[&'a Path],
resolver: &'a Resolver,
pyproject_config: &'a PyprojectConfig,
) -> FxHashMap<&'a Path, Option<&'a Path>> {
// Pre-populate the module cache, since the list of files could (but isn't
// required to) contain some `__init__.py` files.
let mut package_cache: FxHashMap<&Path, bool> = FxHashMap::default();
for file in files {
if file.ends_with("__init__.py") {
if let Some(parent) = file.parent() {
package_cache.insert(parent, true);
}
}
}
// Search for the package root for each file.
let mut package_roots: FxHashMap<&Path, Option<&Path>> = FxHashMap::default();
for file in files {
let namespace_packages = &resolver.resolve(file, pyproject_config).namespace_packages;
if let Some(package) = file.parent() {
if package_roots.contains_key(package) {
continue;
}
package_roots.insert(
package,
detect_package_root_with_cache(package, namespace_packages, &mut package_cache),
);
}
}
package_roots
}
#[cfg(test)]
mod tests {
use std::path::PathBuf;


@ -263,7 +263,7 @@ mod schema {
}
impl RuleSelector {
pub(crate) fn specificity(&self) -> Specificity {
pub fn specificity(&self) -> Specificity {
match self {
RuleSelector::All => Specificity::All,
RuleSelector::Nursery => Specificity::All,
@ -286,7 +286,7 @@ impl RuleSelector {
}
#[derive(EnumIter, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]
pub(crate) enum Specificity {
pub enum Specificity {
All,
LinterGroup,
Linter,
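
`RuleSelector::specificity` and the `Specificity` enum go from `pub(crate)` to `pub` here, presumably so that resolution code that now lives outside the `ruff` crate can still rank selectors. An illustrative use of the newly public API (the sorting call site is a sketch, not code from this diff):

use ruff::RuleSelector;

// Order selectors from the broadest (ALL, whole linters) to the most specific
// (individual codes), so narrower selections are applied last and win.
fn order_for_application(mut selectors: Vec<RuleSelector>) -> Vec<RuleSelector> {
    selectors.sort_by_key(RuleSelector::specificity);
    selectors
}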


@ -1,65 +1,6 @@
//! Settings for the `flake-annotations` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8AnnotationsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "false",
value_type = "bool",
example = "mypy-init-return = true"
)]
/// Whether to allow the omission of a return type hint for `__init__` if at
/// least one argument is annotated.
pub mypy_init_return: Option<bool>,
#[option(
default = "false",
value_type = "bool",
example = "suppress-dummy-args = true"
)]
/// Whether to suppress `ANN000`-level violations for arguments matching the
/// "dummy" variable regex (like `_`).
pub suppress_dummy_args: Option<bool>,
#[option(
default = "false",
value_type = "bool",
example = "suppress-none-returning = true"
)]
/// Whether to suppress `ANN200`-level violations for functions that meet
/// either of the following criteria:
///
/// - Contain no `return` statement.
/// - Explicit `return` statement(s) all return `None` (explicitly or
/// implicitly).
pub suppress_none_returning: Option<bool>,
#[option(
default = "false",
value_type = "bool",
example = "allow-star-arg-any = true"
)]
/// Whether to suppress `ANN401` for dynamically typed `*args` and
/// `**kwargs` arguments.
pub allow_star_arg_any: Option<bool>,
#[option(
default = "false",
value_type = "bool",
example = "ignore-fully-untyped = true"
)]
/// Whether to suppress `ANN*` rules for any declaration
/// that hasn't been typed at all.
/// This makes it easier to gradually add types to a codebase.
pub ignore_fully_untyped: Option<bool>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
#[allow(clippy::struct_excessive_bools)]
@ -70,27 +11,3 @@ pub struct Settings {
pub allow_star_arg_any: bool,
pub ignore_fully_untyped: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
mypy_init_return: options.mypy_init_return.unwrap_or(false),
suppress_dummy_args: options.suppress_dummy_args.unwrap_or(false),
suppress_none_returning: options.suppress_none_returning.unwrap_or(false),
allow_star_arg_any: options.allow_star_arg_any.unwrap_or(false),
ignore_fully_untyped: options.ignore_fully_untyped.unwrap_or(false),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
mypy_init_return: Some(settings.mypy_init_return),
suppress_dummy_args: Some(settings.suppress_dummy_args),
suppress_none_returning: Some(settings.suppress_none_returning),
allow_star_arg_any: Some(settings.allow_star_arg_any),
ignore_fully_untyped: Some(settings.ignore_fully_untyped),
}
}
}
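
The flake8-annotations `Options` struct and its two `From` conversions are deleted from `ruff::rules::flake8_annotations::settings`; the converter hunks earlier in this diff import `Flake8AnnotationsOptions` from `ruff_workspace::options` instead. A hedged reconstruction of how the relocated conversion could look, based on the deleted code above (the method name and its exact home are assumptions):

use ruff::rules::flake8_annotations::settings::Settings;

#[derive(Debug, Default)]
pub struct Flake8AnnotationsOptions {
    pub mypy_init_return: Option<bool>,
    pub suppress_dummy_args: Option<bool>,
    pub suppress_none_returning: Option<bool>,
    pub allow_star_arg_any: Option<bool>,
    pub ignore_fully_untyped: Option<bool>,
}

impl Flake8AnnotationsOptions {
    // Unset options fall back to the same `false` defaults the removed
    // `From<Options> for Settings` impl used.
    pub fn into_settings(self) -> Settings {
        Settings {
            mypy_init_return: self.mypy_init_return.unwrap_or(false),
            suppress_dummy_args: self.suppress_dummy_args.unwrap_or(false),
            suppress_none_returning: self.suppress_none_returning.unwrap_or(false),
            allow_star_arg_any: self.allow_star_arg_any.unwrap_or(false),
            ignore_fully_untyped: self.ignore_fully_untyped.unwrap_or(false),
        }
    }
}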


@ -1,81 +1,19 @@
//! Settings for the `flake8-bandit` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::CacheKey;
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
fn default_tmp_dirs() -> Vec<String> {
pub fn default_tmp_dirs() -> Vec<String> {
["/tmp", "/var/tmp", "/dev/shm"]
.map(ToString::to_string)
.to_vec()
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BanditOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "[\"/tmp\", \"/var/tmp\", \"/dev/shm\"]",
value_type = "list[str]",
example = "hardcoded-tmp-directory = [\"/foo/bar\"]"
)]
/// A list of directories to consider temporary.
pub hardcoded_tmp_directory: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[str]",
example = "extend-hardcoded-tmp-directory = [\"/foo/bar\"]"
)]
/// A list of directories to consider temporary, in addition to those
/// specified by `hardcoded-tmp-directory`.
pub hardcoded_tmp_directory_extend: Option<Vec<String>>,
#[option(
default = "false",
value_type = "bool",
example = "check-typed-exception = true"
)]
/// Whether to disallow `try`-`except`-`pass` (`S110`) for specific
/// exception types. By default, `try`-`except`-`pass` is only
/// disallowed for `Exception` and `BaseException`.
pub check_typed_exception: Option<bool>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub hardcoded_tmp_directory: Vec<String>,
pub check_typed_exception: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
hardcoded_tmp_directory: options
.hardcoded_tmp_directory
.unwrap_or_else(default_tmp_dirs)
.into_iter()
.chain(options.hardcoded_tmp_directory_extend.unwrap_or_default())
.collect(),
check_typed_exception: options.check_typed_exception.unwrap_or(false),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
hardcoded_tmp_directory: Some(settings.hardcoded_tmp_directory),
hardcoded_tmp_directory_extend: None,
check_typed_exception: Some(settings.check_typed_exception),
}
}
}
impl Default for Settings {
fn default() -> Self {
Self {


@ -1,53 +1,8 @@
//! Settings for the `flake8-bugbear` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BugbearOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
# Allow default arguments like, e.g., `data: List[str] = fastapi.Query(None)`.
extend-immutable-calls = ["fastapi.Depends", "fastapi.Query"]
"#
)]
/// Additional callable functions to consider "immutable" when evaluating, e.g., the
/// `function-call-in-default-argument` rule (`B008`) or `function-call-in-dataclass-defaults`
/// rule (`RUF009`).
///
/// Expects to receive a list of fully-qualified names (e.g., `fastapi.Query`, rather than
/// `Query`).
pub extend_immutable_calls: Option<Vec<String>>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub extend_immutable_calls: Vec<String>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
extend_immutable_calls: options.extend_immutable_calls.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
extend_immutable_calls: Some(settings.extend_immutable_calls),
}
}
}


@ -1,45 +1,8 @@
//! Settings for the `flake8-builtins` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8BuiltinsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = "builtins-ignorelist = [\"id\"]"
)]
/// Ignore list of builtins.
pub builtins_ignorelist: Option<Vec<String>>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub builtins_ignorelist: Vec<String>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
builtins_ignorelist: options.builtins_ignorelist.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
builtins_ignorelist: Some(settings.builtins_ignorelist),
}
}
}


@ -1,49 +1,8 @@
//! Settings for the `flake8-comprehensions` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8ComprehensionsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "false",
value_type = "bool",
example = "allow-dict-calls-with-keyword-arguments = true"
)]
/// Allow `dict` calls that make use of keyword arguments (e.g., `dict(a=1, b=2)`).
pub allow_dict_calls_with_keyword_arguments: Option<bool>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub allow_dict_calls_with_keyword_arguments: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
allow_dict_calls_with_keyword_arguments: options
.allow_dict_calls_with_keyword_arguments
.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
allow_dict_calls_with_keyword_arguments: Some(
settings.allow_dict_calls_with_keyword_arguments,
),
}
}
}


@ -2,51 +2,8 @@
use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8CopyrightOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"(?i)Copyright\s+(\(C\)\s+)?\d{4}([-,]\d{4})*"#,
value_type = "str",
example = r#"notice-rgx = "(?i)Copyright \\(C\\) \\d{4}""#
)]
/// The regular expression used to match the copyright notice, compiled
/// with the [`regex`](https://docs.rs/regex/latest/regex/) crate.
///
/// Defaults to `(?i)Copyright\s+(\(C\)\s+)?\d{4}(-\d{4})*`, which matches
/// the following:
/// - `Copyright 2023`
/// - `Copyright (C) 2023`
/// - `Copyright 2021-2023`
/// - `Copyright (C) 2021-2023`
pub notice_rgx: Option<String>,
#[option(default = "None", value_type = "str", example = r#"author = "Ruff""#)]
/// Author to enforce within the copyright notice. If provided, the
/// author must be present immediately following the copyright notice.
pub author: Option<String>,
#[option(
default = r#"0"#,
value_type = "int",
example = r#"
# Avoid enforcing a header on files smaller than 1024 bytes.
min-file-size = 1024
"#
)]
/// A minimum file size (in bytes) required for a copyright notice to
/// be enforced. By default, all files are validated.
pub min_file_size: Option<usize>,
}
use ruff_macros::CacheKey;
#[derive(Debug, CacheKey)]
pub struct Settings {
@ -55,7 +12,7 @@ pub struct Settings {
pub min_file_size: usize,
}
static COPYRIGHT: Lazy<Regex> =
pub static COPYRIGHT: Lazy<Regex> =
Lazy::new(|| Regex::new(r"(?i)Copyright\s+(\(C\)\s+)?\d{4}(-\d{4})*").unwrap());
impl Default for Settings {
@ -67,29 +24,3 @@ impl Default for Settings {
}
}
}
impl TryFrom<Options> for Settings {
type Error = anyhow::Error;
fn try_from(value: Options) -> Result<Self, Self::Error> {
Ok(Self {
notice_rgx: value
.notice_rgx
.map(|pattern| Regex::new(&pattern))
.transpose()?
.unwrap_or_else(|| COPYRIGHT.clone()),
author: value.author,
min_file_size: value.min_file_size.unwrap_or_default(),
})
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
notice_rgx: Some(settings.notice_rgx.to_string()),
author: settings.author,
min_file_size: Some(settings.min_file_size),
}
}
}


@ -1,41 +1,8 @@
//! Settings for the `flake8-errmsg` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8ErrMsgOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(default = "0", value_type = "int", example = "max-string-length = 20")]
/// Maximum string length for string literals in exception messages.
pub max_string_length: Option<usize>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub max_string_length: usize,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
max_string_length: options.max_string_length.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
max_string_length: Some(settings.max_string_length),
}
}
}


@ -1,40 +1,11 @@
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8GetTextOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"["_", "gettext", "ngettext"]"#,
value_type = "list[str]",
example = r#"function-names = ["_", "gettext", "ngettext", "ugettetxt"]"#
)]
/// The function names to consider as internationalization calls.
pub function_names: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"extend-function-names = ["ugettetxt"]"#
)]
/// Additional function names to consider as internationalization calls, in addition to those
/// included in `function-names`.
pub extend_function_names: Option<Vec<String>>,
}
use ruff_macros::CacheKey;
#[derive(Debug, CacheKey)]
pub struct Settings {
pub functions_names: Vec<String>,
}
fn default_func_names() -> Vec<String> {
pub fn default_func_names() -> Vec<String> {
vec![
"_".to_string(),
"gettext".to_string(),
@ -49,25 +20,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
functions_names: options
.function_names
.unwrap_or_else(default_func_names)
.into_iter()
.chain(options.extend_function_names.unwrap_or_default())
.collect(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
function_names: Some(settings.functions_names),
extend_function_names: Some(Vec::new()),
}
}
}
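
flake8-gettext follows the same pattern: the `Options` struct and conversions are removed, and `default_func_names` becomes `pub` so whichever crate now owns the options can still fall back to it. A condensed sketch of the defaulting-plus-extend logic from the deleted conversion (the import path is an assumption):

use ruff::rules::flake8_gettext::settings::default_func_names;

// Start from the configured (or default) base names, then append any
// project-specific extensions, mirroring the removed `From<Options>` impl.
fn resolve_function_names(
    function_names: Option<Vec<String>>,
    extend_function_names: Option<Vec<String>>,
) -> Vec<String> {
    function_names
        .unwrap_or_else(default_func_names)
        .into_iter()
        .chain(extend_function_names.unwrap_or_default())
        .collect()
}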


@ -1,37 +1,6 @@
//! Settings for the `flake8-implicit-str-concat` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8ImplicitStrConcatOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"true"#,
value_type = "bool",
example = r#"
allow-multiline = false
"#
)]
/// Whether to allow implicit string concatenations for multiline strings.
/// By default, implicit concatenations of multiline strings are
/// allowed (but continuation lines, delimited with a backslash, are
/// prohibited).
///
/// Note that setting `allow-multiline = false` should typically be coupled
/// with disabling `explicit-string-concatenation` (`ISC003`). Otherwise,
/// both explicit and implicit multiline string concatenations will be seen
/// as violations.
pub allow_multiline: Option<bool>,
}
use ruff_macros::CacheKey;
#[derive(Debug, CacheKey)]
pub struct Settings {
@ -45,19 +14,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
allow_multiline: options.allow_multiline.unwrap_or(true),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
allow_multiline: Some(settings.allow_multiline),
}
}
}


@ -11,6 +11,7 @@ mod tests {
use crate::assert_messages;
use crate::registry::Rule;
use crate::rules::flake8_import_conventions::settings::default_aliases;
use crate::settings::Settings;
use crate::test::test_path;
@ -26,19 +27,19 @@ mod tests {
#[test]
fn custom() -> Result<()> {
let mut aliases = super::settings::default_aliases();
aliases.extend(FxHashMap::from_iter([
("dask.array".to_string(), "da".to_string()),
("dask.dataframe".to_string(), "dd".to_string()),
]));
let diagnostics = test_path(
Path::new("flake8_import_conventions/custom.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: None,
extend_aliases: Some(FxHashMap::from_iter([
("dask.array".to_string(), "da".to_string()),
("dask.dataframe".to_string(), "dd".to_string()),
])),
banned_aliases: None,
banned_from: None,
}
.into(),
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
},
..Settings::for_rule(Rule::UnconventionalImportAlias)
},
)?;
@ -51,10 +52,9 @@ mod tests {
let diagnostics = test_path(
Path::new("flake8_import_conventions/custom_banned.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: None,
extend_aliases: None,
banned_aliases: Some(FxHashMap::from_iter([
flake8_import_conventions: super::settings::Settings {
aliases: default_aliases(),
banned_aliases: FxHashMap::from_iter([
(
"typing".to_string(),
vec!["t".to_string(), "ty".to_string()],
@ -68,10 +68,9 @@ mod tests {
vec!["K".to_string()],
),
("torch.nn.functional".to_string(), vec!["F".to_string()]),
])),
banned_from: None,
}
.into(),
]),
banned_from: FxHashSet::default(),
},
..Settings::for_rule(Rule::BannedImportAlias)
},
)?;
@ -84,17 +83,15 @@ mod tests {
let diagnostics = test_path(
Path::new("flake8_import_conventions/custom_banned_from.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: None,
extend_aliases: None,
banned_aliases: None,
banned_from: Some(FxHashSet::from_iter([
flake8_import_conventions: super::settings::Settings {
aliases: default_aliases(),
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::from_iter([
"logging.config".to_string(),
"typing".to_string(),
"pandas".to_string(),
])),
}
.into(),
]),
},
..Settings::for_rule(Rule::BannedImportFrom)
},
)?;
@ -107,18 +104,16 @@ mod tests {
let diagnostics = test_path(
Path::new("flake8_import_conventions/remove_default.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: Some(FxHashMap::from_iter([
flake8_import_conventions: super::settings::Settings {
aliases: FxHashMap::from_iter([
("altair".to_string(), "alt".to_string()),
("matplotlib.pyplot".to_string(), "plt".to_string()),
("pandas".to_string(), "pd".to_string()),
("seaborn".to_string(), "sns".to_string()),
])),
extend_aliases: None,
banned_aliases: None,
banned_from: None,
}
.into(),
]),
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
},
..Settings::for_rule(Rule::UnconventionalImportAlias)
},
)?;
@ -128,19 +123,20 @@ mod tests {
#[test]
fn override_defaults() -> Result<()> {
let mut aliases = super::settings::default_aliases();
aliases.extend(FxHashMap::from_iter([(
"numpy".to_string(),
"nmp".to_string(),
)]));
let diagnostics = test_path(
Path::new("flake8_import_conventions/override_default.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: None,
extend_aliases: Some(FxHashMap::from_iter([(
"numpy".to_string(),
"nmp".to_string(),
)])),
banned_aliases: None,
banned_from: None,
}
.into(),
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
},
..Settings::for_rule(Rule::UnconventionalImportAlias)
},
)?;
@ -150,22 +146,23 @@ mod tests {
#[test]
fn from_imports() -> Result<()> {
let diagnostics = test_path(
Path::new("flake8_import_conventions/from_imports.py"),
&Settings {
flake8_import_conventions: super::settings::Options {
aliases: None,
extend_aliases: Some(FxHashMap::from_iter([
let mut aliases = super::settings::default_aliases();
aliases.extend(FxHashMap::from_iter([
("xml.dom.minidom".to_string(), "md".to_string()),
(
"xml.dom.minidom.parseString".to_string(),
"pstr".to_string(),
),
])),
banned_aliases: None,
banned_from: None,
}
.into(),
]));
let diagnostics = test_path(
Path::new("flake8_import_conventions/from_imports.py"),
&Settings {
flake8_import_conventions: super::settings::Settings {
aliases,
banned_aliases: FxHashMap::default(),
banned_from: FxHashSet::default(),
},
..Settings::for_rule(Rule::UnconventionalImportAlias)
},
)?;
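
These tests no longer build an `Options` value and call `.into()`; they construct `Settings` directly, starting from the now-public `default_aliases()` and extending or replacing it. The construction pattern, condensed into one helper (a sketch written as if inside the ruff crate, matching the test module above):

use rustc_hash::{FxHashMap, FxHashSet};

use crate::rules::flake8_import_conventions::settings::{default_aliases, Settings};

// Extend the conventional aliases with project-specific ones and hand the
// linter a fully built Settings value.
fn custom_settings() -> Settings {
    let mut aliases = default_aliases();
    aliases.extend([("dask.array".to_string(), "da".to_string())]);
    Settings {
        aliases,
        banned_aliases: FxHashMap::default(),
        banned_from: FxHashSet::default(),
    }
}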


@ -1,9 +1,8 @@
//! Settings for import conventions.
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
("altair", "alt"),
@ -22,68 +21,6 @@ const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
("pyarrow", "pa"),
];
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8ImportConventionsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"{"altair": "alt", "matplotlib": "mpl", "matplotlib.pyplot": "plt", "numpy": "np", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "tkinter": "tk", "holoviews": "hv", "panel": "pn", "plotly.express": "px", "polars": "pl", "pyarrow": "pa"}"#,
value_type = "dict[str, str]",
example = r#"
[tool.ruff.flake8-import-conventions.aliases]
# Declare the default aliases.
altair = "alt"
"matplotlib.pyplot" = "plt"
numpy = "np"
pandas = "pd"
seaborn = "sns"
scipy = "sp"
"#
)]
/// The conventional aliases for imports. These aliases can be extended by
/// the `extend_aliases` option.
pub aliases: Option<FxHashMap<String, String>>,
#[option(
default = r#"{}"#,
value_type = "dict[str, str]",
example = r#"
[tool.ruff.flake8-import-conventions.extend-aliases]
# Declare a custom alias for the `matplotlib` module.
"dask.dataframe" = "dd"
"#
)]
/// A mapping from module to conventional import alias. These aliases will
/// be added to the `aliases` mapping.
pub extend_aliases: Option<FxHashMap<String, String>>,
#[option(
default = r#"{}"#,
value_type = "dict[str, list[str]]",
example = r#"
[tool.ruff.flake8-import-conventions.banned-aliases]
# Declare the banned aliases.
"tensorflow.keras.backend" = ["K"]
"#
)]
/// A mapping from module to its banned import aliases.
pub banned_aliases: Option<FxHashMap<String, Vec<String>>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
# Declare the banned `from` imports.
banned-from = ["typing"]
"#
)]
/// A list of modules that are allowed to be imported from
pub banned_from: Option<FxHashSet<String>>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub aliases: FxHashMap<String, String>,
@ -91,7 +28,7 @@ pub struct Settings {
pub banned_from: FxHashSet<String>,
}
fn default_aliases() -> FxHashMap<String, String> {
pub fn default_aliases() -> FxHashMap<String, String> {
CONVENTIONAL_ALIASES
.iter()
.map(|(k, v)| ((*k).to_string(), (*v).to_string()))
@ -107,32 +44,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
let mut aliases = match options.aliases {
Some(options_aliases) => options_aliases,
None => default_aliases(),
};
if let Some(extend_aliases) = options.extend_aliases {
aliases.extend(extend_aliases);
}
Self {
aliases,
banned_aliases: options.banned_aliases.unwrap_or_default(),
banned_from: options.banned_from.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
aliases: Some(settings.aliases),
extend_aliases: None,
banned_aliases: None,
banned_from: None,
}
}
}


@ -2,14 +2,13 @@
use std::error::Error;
use std::fmt;
use serde::{Deserialize, Serialize};
use ruff_macros::CacheKey;
use crate::settings::types::IdentifierPattern;
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use super::types;
fn default_broad_exceptions() -> Vec<IdentifierPattern> {
pub fn default_broad_exceptions() -> Vec<IdentifierPattern> {
[
"BaseException",
"Exception",
@ -23,106 +22,6 @@ fn default_broad_exceptions() -> Vec<IdentifierPattern> {
.to_vec()
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8PytestStyleOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "true",
value_type = "bool",
example = "fixture-parentheses = true"
)]
/// Boolean flag specifying whether `@pytest.fixture()` without parameters
/// should have parentheses. If the option is set to `true` (the
/// default), `@pytest.fixture()` is valid and `@pytest.fixture` is
/// invalid. If set to `false`, `@pytest.fixture` is valid and
/// `@pytest.fixture()` is invalid.
pub fixture_parentheses: Option<bool>,
#[option(
default = "tuple",
value_type = r#""csv" | "tuple" | "list""#,
example = "parametrize-names-type = \"list\""
)]
/// Expected type for multiple argument names in `@pytest.mark.parametrize`.
/// The following values are supported:
///
/// - `csv` — a comma-separated list, e.g.
/// `@pytest.mark.parametrize('name1,name2', ...)`
/// - `tuple` (default) — e.g.
/// `@pytest.mark.parametrize(('name1', 'name2'), ...)`
/// - `list` — e.g. `@pytest.mark.parametrize(['name1', 'name2'], ...)`
pub parametrize_names_type: Option<types::ParametrizeNameType>,
#[option(
default = "list",
value_type = r#""tuple" | "list""#,
example = "parametrize-values-type = \"tuple\""
)]
/// Expected type for the list of values rows in `@pytest.mark.parametrize`.
/// The following values are supported:
///
/// - `tuple` — e.g. `@pytest.mark.parametrize('name', (1, 2, 3))`
/// - `list` (default) — e.g. `@pytest.mark.parametrize('name', [1, 2, 3])`
pub parametrize_values_type: Option<types::ParametrizeValuesType>,
#[option(
default = "tuple",
value_type = r#""tuple" | "list""#,
example = "parametrize-values-row-type = \"list\""
)]
/// Expected type for each row of values in `@pytest.mark.parametrize` in
/// case of multiple parameters. The following values are supported:
///
/// - `tuple` (default) — e.g.
/// `@pytest.mark.parametrize(('name1', 'name2'), [(1, 2), (3, 4)])`
/// - `list` — e.g.
/// `@pytest.mark.parametrize(('name1', 'name2'), [[1, 2], [3, 4]])`
pub parametrize_values_row_type: Option<types::ParametrizeValuesRowType>,
#[option(
default = r#"["BaseException", "Exception", "ValueError", "OSError", "IOError", "EnvironmentError", "socket.error"]"#,
value_type = "list[str]",
example = "raises-require-match-for = [\"requests.RequestException\"]"
)]
/// List of exception names that require a match= parameter in a
/// `pytest.raises()` call.
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub raises_require_match_for: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[str]",
example = "raises-extend-require-match-for = [\"requests.RequestException\"]"
)]
/// List of additional exception names that require a match= parameter in a
/// `pytest.raises()` call. This extends the default list of exceptions
/// that require a match= parameter.
/// This option is useful if you want to extend the default list of
/// exceptions that require a match= parameter without having to specify
/// the entire list.
/// Note that this option does not remove any exceptions from the default
/// list.
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub raises_extend_require_match_for: Option<Vec<String>>,
#[option(
default = "true",
value_type = "bool",
example = "mark-parentheses = true"
)]
/// Boolean flag specifying whether `@pytest.mark.foo()` without parameters
/// should have parentheses. If the option is set to `true` (the
/// default), `@pytest.mark.foo()` is valid and `@pytest.mark.foo` is
/// invalid. If set to `false`, `@pytest.fixture` is valid and
/// `@pytest.mark.foo()` is invalid.
pub mark_parentheses: Option<bool>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub fixture_parentheses: bool,
@ -134,67 +33,6 @@ pub struct Settings {
pub mark_parentheses: bool,
}
impl TryFrom<Options> for Settings {
type Error = SettingsError;
fn try_from(options: Options) -> Result<Self, Self::Error> {
Ok(Self {
fixture_parentheses: options.fixture_parentheses.unwrap_or(true),
parametrize_names_type: options.parametrize_names_type.unwrap_or_default(),
parametrize_values_type: options.parametrize_values_type.unwrap_or_default(),
parametrize_values_row_type: options.parametrize_values_row_type.unwrap_or_default(),
raises_require_match_for: options
.raises_require_match_for
.map(|patterns| {
patterns
.into_iter()
.map(|pattern| IdentifierPattern::new(&pattern))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidRaisesRequireMatchFor)?
.unwrap_or_else(default_broad_exceptions),
raises_extend_require_match_for: options
.raises_extend_require_match_for
.map(|patterns| {
patterns
.into_iter()
.map(|pattern| IdentifierPattern::new(&pattern))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidRaisesExtendRequireMatchFor)?
.unwrap_or_default(),
mark_parentheses: options.mark_parentheses.unwrap_or(true),
})
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
fixture_parentheses: Some(settings.fixture_parentheses),
parametrize_names_type: Some(settings.parametrize_names_type),
parametrize_values_type: Some(settings.parametrize_values_type),
parametrize_values_row_type: Some(settings.parametrize_values_row_type),
raises_require_match_for: Some(
settings
.raises_require_match_for
.iter()
.map(ToString::to_string)
.collect(),
),
raises_extend_require_match_for: Some(
settings
.raises_extend_require_match_for
.iter()
.map(ToString::to_string)
.collect(),
),
mark_parentheses: Some(settings.mark_parentheses),
}
}
}
impl Default for Settings {
fn default() -> Self {
Self {


@ -2,7 +2,7 @@
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
@ -20,59 +20,6 @@ impl Default for Quote {
}
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8QuotesOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#""double""#,
value_type = r#""single" | "double""#,
example = r#"
inline-quotes = "single"
"#
)]
/// Quote style to prefer for inline strings (either "single" or
/// "double").
pub inline_quotes: Option<Quote>,
#[option(
default = r#""double""#,
value_type = r#""single" | "double""#,
example = r#"
multiline-quotes = "single"
"#
)]
/// Quote style to prefer for multiline strings (either "single" or
/// "double").
pub multiline_quotes: Option<Quote>,
#[option(
default = r#""double""#,
value_type = r#""single" | "double""#,
example = r#"
docstring-quotes = "single"
"#
)]
/// Quote style to prefer for docstrings (either "single" or "double").
pub docstring_quotes: Option<Quote>,
#[option(
default = r#"true"#,
value_type = "bool",
example = r#"
# Don't bother trying to avoid escapes.
avoid-escape = false
"#
)]
/// Whether to avoid using single quotes if a string contains single quotes,
/// or vice-versa with double quotes, as per [PEP 8](https://peps.python.org/pep-0008/#string-quotes).
/// This minimizes the need to escape quotation marks within strings.
pub avoid_escape: Option<bool>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub inline_quotes: Quote,
@ -91,25 +38,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
inline_quotes: options.inline_quotes.unwrap_or_default(),
multiline_quotes: options.multiline_quotes.unwrap_or_default(),
docstring_quotes: options.docstring_quotes.unwrap_or_default(),
avoid_escape: options.avoid_escape.unwrap_or(true),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
inline_quotes: Some(settings.inline_quotes),
multiline_quotes: Some(settings.multiline_quotes),
docstring_quotes: Some(settings.docstring_quotes),
avoid_escape: Some(settings.avoid_escape),
}
}
}


@ -1,13 +1,11 @@
//! Settings for the `flake8-self` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
// By default, ignore the `namedtuple` methods and attributes, as well as the
// _sunder_ names in Enum, which are underscore-prefixed to prevent conflicts
// with field names.
const IGNORE_NAMES: [&str; 7] = [
pub const IGNORE_NAMES: [&str; 7] = [
"_make",
"_asdict",
"_replace",
@ -17,27 +15,6 @@ const IGNORE_NAMES: [&str; 7] = [
"_value_",
];
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8SelfOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"["_make", "_asdict", "_replace", "_fields", "_field_defaults", "_name_", "_value_"]"#,
value_type = "list[str]",
example = r#"
ignore-names = ["_new"]
"#
)]
/// A list of names to ignore when considering `flake8-self` violations.
pub ignore_names: Option<Vec<String>>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub ignore_names: Vec<String>,
@ -50,21 +27,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
ignore_names: options
.ignore_names
.unwrap_or_else(|| IGNORE_NAMES.map(String::from).to_vec()),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
ignore_names: Some(settings.ignore_names),
}
}
}


@ -1,6 +1,5 @@
//! Rules from [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/).
pub(crate) mod matchers;
pub mod options;
pub(crate) mod rules;
pub mod settings;


@ -1,77 +0,0 @@
//! Settings for the `flake8-tidy-imports` plugin.
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use ruff_macros::{CombineOptions, ConfigurationOptions};
use super::settings::{ApiBan, Settings, Strictness};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8TidyImportsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#""parents""#,
value_type = r#""parents" | "all""#,
example = r#"
# Disallow all relative imports.
ban-relative-imports = "all"
"#
)]
/// Whether to ban all relative imports (`"all"`), or only those imports
/// that extend into the parent module or beyond (`"parents"`).
pub ban_relative_imports: Option<Strictness>,
#[option(
default = r#"{}"#,
value_type = r#"dict[str, { "msg": str }]"#,
example = r#"
[tool.ruff.flake8-tidy-imports.banned-api]
"cgi".msg = "The cgi module is deprecated, see https://peps.python.org/pep-0594/#cgi."
"typing.TypedDict".msg = "Use typing_extensions.TypedDict instead."
"#
)]
/// Specific modules or module members that may not be imported or accessed.
/// Note that this rule is only meant to flag accidental uses,
/// and can be circumvented via `eval` or `importlib`.
pub banned_api: Option<FxHashMap<String, ApiBan>>,
#[option(
default = r#"[]"#,
value_type = r#"list[str]"#,
example = r#"
# Ban certain modules from being imported at module level, instead requiring
# that they're imported lazily (e.g., within a function definition).
banned-module-level-imports = ["torch", "tensorflow"]
"#
)]
/// List of specific modules that may not be imported at module level, and should instead be
/// imported lazily (e.g., within a function definition, or an `if TYPE_CHECKING:`
/// block, or some other nested context).
pub banned_module_level_imports: Option<Vec<String>>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
ban_relative_imports: options.ban_relative_imports.unwrap_or(Strictness::Parents),
banned_api: options.banned_api.unwrap_or_default(),
banned_module_level_imports: options.banned_module_level_imports.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
ban_relative_imports: Some(settings.ban_relative_imports),
banned_api: Some(settings.banned_api),
banned_module_level_imports: Some(settings.banned_module_level_imports),
}
}
}

View file

@@ -1,70 +1,6 @@
//! Settings for the `flake8-type-checking` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8TypeCheckingOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "false",
value_type = "bool",
example = r#"
strict = true
"#
)]
/// Enforce TC001, TC002, and TC003 rules even when valid runtime imports
/// are present for the same module.
///
/// See flake8-type-checking's [strict](https://github.com/snok/flake8-type-checking#strict) option.
pub strict: Option<bool>,
#[option(
default = "[\"typing\"]",
value_type = "list[str]",
example = r#"
exempt-modules = ["typing", "typing_extensions"]
"#
)]
/// Exempt certain modules from needing to be moved into type-checking
/// blocks.
pub exempt_modules: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[str]",
example = r#"
runtime-evaluated-base-classes = ["pydantic.BaseModel", "sqlalchemy.orm.DeclarativeBase"]
"#
)]
/// Exempt classes that list any of the enumerated classes as a base class
/// from needing to be moved into type-checking blocks.
///
/// Common examples include Pydantic's `pydantic.BaseModel` and SQLAlchemy's
/// `sqlalchemy.orm.DeclarativeBase`, but can also support user-defined
/// classes that inherit from those base classes. For example, if you define
/// a common `DeclarativeBase` subclass that's used throughout your project
/// (e.g., `class Base(DeclarativeBase) ...` in `base.py`), you can add it to
/// this list (`runtime-evaluated-base-classes = ["base.Base"]`) to exempt
/// models from being moved into type-checking blocks.
pub runtime_evaluated_base_classes: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[str]",
example = r#"
runtime-evaluated-decorators = ["attrs.define", "attrs.frozen"]
"#
)]
/// Exempt classes decorated with any of the enumerated decorators from
/// needing to be moved into type-checking blocks.
pub runtime_evaluated_decorators: Option<Vec<String>>,
}
use ruff_macros::CacheKey;
#[derive(Debug, CacheKey)]
pub struct Settings {
@@ -84,29 +20,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
strict: options.strict.unwrap_or(false),
exempt_modules: options
.exempt_modules
.unwrap_or_else(|| vec!["typing".to_string()]),
runtime_evaluated_base_classes: options
.runtime_evaluated_base_classes
.unwrap_or_default(),
runtime_evaluated_decorators: options.runtime_evaluated_decorators.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
strict: Some(settings.strict),
exempt_modules: Some(settings.exempt_modules),
runtime_evaluated_base_classes: Some(settings.runtime_evaluated_base_classes),
runtime_evaluated_decorators: Some(settings.runtime_evaluated_decorators),
}
}
}

View file

@@ -1,45 +1,8 @@
//! Settings for the `flake8-unused-arguments` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Flake8UnusedArgumentsOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "false",
value_type = "bool",
example = "ignore-variadic-names = true"
)]
/// Whether to allow unused variadic arguments, like `*args` and `**kwargs`.
pub ignore_variadic_names: Option<bool>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub ignore_variadic_names: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
ignore_variadic_names: options.ignore_variadic_names.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
ignore_variadic_names: Some(settings.ignore_variadic_names),
}
}
}

View file

@@ -223,7 +223,7 @@ pub struct KnownModules {
}
impl KnownModules {
pub(crate) fn new(
pub fn new(
first_party: Vec<glob::Pattern>,
third_party: Vec<glob::Pattern>,
local_folder: Vec<glob::Pattern>,
@@ -328,7 +328,7 @@ impl KnownModules {
}
/// Return the list of modules that are known to be of a given type.
pub(crate) fn modules_for_known_type(
pub fn modules_for_known_type(
&self,
import_type: ImportType,
) -> impl Iterator<Item = &glob::Pattern> {
@@ -348,7 +348,7 @@ impl KnownModules {
}
/// Return the list of user-defined modules, indexed by section.
pub(crate) fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
pub fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
let mut user_defined: FxHashMap<&str, Vec<&glob::Pattern>> = FxHashMap::default();
for (module, section) in &self.known {
if let ImportSection::UserDefined(section_name) = section {

View file

@@ -26,7 +26,7 @@ use crate::settings::types::PythonVersion;
mod annotate;
pub(crate) mod block;
mod categorize;
pub mod categorize;
mod comments;
mod format;
mod helpers;

View file

@@ -3,18 +3,14 @@
use std::collections::BTreeSet;
use std::error::Error;
use std::fmt;
use std::hash::BuildHasherDefault;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
use crate::rules::isort::categorize::KnownModules;
use crate::rules::isort::ImportType;
use crate::settings::types::IdentifierPattern;
use crate::warn_user_once;
use super::categorize::ImportSection;
@@ -36,305 +32,6 @@ impl Default for RelativeImportsOrder {
}
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "IsortOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
force-wrap-aliases = true
combine-as-imports = true
"#
)]
/// Force `import from` statements with multiple members and at least one
/// alias (e.g., `import A as B`) to wrap such that every line contains
/// exactly one member. For example, this formatting would be retained,
/// rather than condensing to a single line:
///
/// ```python
/// from .utils import (
/// test_directory as test_directory,
/// test_id as test_id
/// )
/// ```
///
/// Note that this setting is only effective when combined with
/// `combine-as-imports = true`. When `combine-as-imports` isn't
/// enabled, every aliased `import from` will be given its own line, in
/// which case, wrapping is not necessary.
pub force_wrap_aliases: Option<bool>,
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"force-single-line = true"#
)]
/// Forces all from imports to appear on their own line.
pub force_single_line: Option<bool>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
single-line-exclusions = ["os", "json"]
"#
)]
/// One or more modules to exclude from the single line rule.
pub single_line_exclusions: Option<Vec<String>>,
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
combine-as-imports = true
"#
)]
/// Combines as imports on the same line. See isort's [`combine-as-imports`](https://pycqa.github.io/isort/docs/configuration/options.html#combine-as-imports)
/// option.
pub combine_as_imports: Option<bool>,
#[option(
default = r#"true"#,
value_type = "bool",
example = r#"
split-on-trailing-comma = false
"#
)]
/// If a comma is placed after the last member in a multi-line import, then
/// the imports will never be folded into one line.
///
/// See isort's [`split-on-trailing-comma`](https://pycqa.github.io/isort/docs/configuration/options.html#split-on-trailing-comma) option.
pub split_on_trailing_comma: Option<bool>,
#[option(
default = r#"true"#,
value_type = "bool",
example = r#"
order-by-type = true
"#
)]
/// Order imports by type, which is determined by case, in addition to
/// alphabetically.
pub order_by_type: Option<bool>,
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
force-sort-within-sections = true
"#
)]
/// Don't sort straight-style imports (like `import sys`) before from-style
/// imports (like `from itertools import groupby`). Instead, sort the
/// imports by module, independent of import style.
pub force_sort_within_sections: Option<bool>,
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
case-sensitive = true
"#
)]
/// Sort imports taking into account case sensitivity.
pub case_sensitive: Option<bool>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
force-to-top = ["src"]
"#
)]
/// Force specific imports to the top of their appropriate section.
pub force_to_top: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
known-first-party = ["src"]
"#
)]
/// A list of modules to consider first-party, regardless of whether they
/// can be identified as such via introspection of the local filesystem.
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub known_first_party: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
known-third-party = ["src"]
"#
)]
/// A list of modules to consider third-party, regardless of whether they
/// can be identified as such via introspection of the local filesystem.
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub known_third_party: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
known-local-folder = ["src"]
"#
)]
/// A list of modules to consider as being in a local folder.
/// Generally, this is reserved for relative imports (`from . import module`).
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub known_local_folder: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
extra-standard-library = ["path"]
"#
)]
/// A list of modules to consider standard-library, in addition to those
/// known to Ruff in advance.
///
/// Supports glob patterns. For more information on the glob syntax, refer
/// to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub extra_standard_library: Option<Vec<String>>,
#[option(
default = r#"furthest-to-closest"#,
value_type = r#""furthest-to-closest" | "closest-to-furthest""#,
example = r#"
relative-imports-order = "closest-to-furthest"
"#
)]
/// Whether to place "closer" imports (fewer `.` characters, most local)
/// before "further" imports (more `.` characters, least local), or vice
/// versa.
///
/// The default ("furthest-to-closest") is equivalent to isort's
/// `reverse-relative` default (`reverse-relative = false`); setting
/// this to "closest-to-furthest" is equivalent to isort's
/// `reverse-relative = true`.
pub relative_imports_order: Option<RelativeImportsOrder>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
required-imports = ["from __future__ import annotations"]
"#
)]
/// Add the specified import line to all files.
pub required_imports: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
classes = ["SVC"]
"#
)]
/// An override list of tokens to always recognize as a Class for
/// `order-by-type` regardless of casing.
pub classes: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
constants = ["constant"]
"#
)]
/// An override list of tokens to always recognize as a CONSTANT
/// for `order-by-type` regardless of casing.
pub constants: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
variables = ["VAR"]
"#
)]
/// An override list of tokens to always recognize as a var
/// for `order-by-type` regardless of casing.
pub variables: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = r#"list["future" | "standard-library" | "third-party" | "first-party" | "local-folder" | str]"#,
example = r#"
no-lines-before = ["future", "standard-library"]
"#
)]
/// A list of sections that should _not_ be delineated from the previous
/// section via empty lines.
pub no_lines_before: Option<Vec<ImportSection>>,
#[option(
default = r#"-1"#,
value_type = "int",
example = r#"
# Use a single line after each import block.
lines-after-imports = 1
"#
)]
/// The number of blank lines to place after imports.
/// Use `-1` for automatic determination.
pub lines_after_imports: Option<isize>,
#[option(
default = r#"0"#,
value_type = "int",
example = r#"
# Use a single line between direct and from import
lines-between-types = 1
"#
)]
/// The number of lines to place between "direct" and `import from` imports.
pub lines_between_types: Option<usize>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
forced-separate = ["tests"]
"#
)]
/// A list of modules to separate into auxiliary block(s) of imports,
/// in the order specified.
pub forced_separate: Option<Vec<String>>,
#[option(
default = r#"["future", "standard-library", "third-party", "first-party", "local-folder"]"#,
value_type = r#"list["future" | "standard-library" | "third-party" | "first-party" | "local-folder" | str]"#,
example = r#"
section-order = ["future", "standard-library", "first-party", "local-folder", "third-party"]
"#
)]
/// Override in which order the sections should be output. Can be used to move custom sections.
pub section_order: Option<Vec<ImportSection>>,
#[option(
default = r#"true"#,
value_type = "bool",
example = r#"
detect-same-package = false
"#
)]
/// Whether to automatically mark imports from within the same package as first-party.
/// For example, when `detect-same-package = true`, then when analyzing files within the
/// `foo` package, any imports from within the `foo` package will be considered first-party.
///
/// This heuristic is often unnecessary when `src` is configured to detect all first-party
/// sources; however, if `src` is _not_ configured, this heuristic can be useful to detect
/// first-party imports from _within_ (but not _across_) first-party packages.
pub detect_same_package: Option<bool>,
// Tables are required to go last.
#[option(
default = "{}",
value_type = "dict[str, list[str]]",
example = r#"
# Group all Django imports into a separate section.
[tool.ruff.isort.sections]
"django" = ["django"]
"#
)]
/// A list of mappings from section names to modules.
/// By default custom sections are output last, but this can be overridden with `section-order`.
pub sections: Option<FxHashMap<ImportSection, Vec<String>>>,
}
#[derive(Debug, CacheKey)]
#[allow(clippy::struct_excessive_bools)]
pub struct Settings {
@@ -389,170 +86,6 @@ impl Default for Settings {
}
}
impl TryFrom<Options> for Settings {
type Error = SettingsError;
fn try_from(options: Options) -> Result<Self, Self::Error> {
// Extract any configuration options that deal with user-defined sections.
let mut section_order: Vec<_> = options
.section_order
.unwrap_or_else(|| ImportType::iter().map(ImportSection::Known).collect());
let known_first_party = options
.known_first_party
.map(|names| {
names
.into_iter()
.map(|name| IdentifierPattern::new(&name))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidKnownFirstParty)?
.unwrap_or_default();
let known_third_party = options
.known_third_party
.map(|names| {
names
.into_iter()
.map(|name| IdentifierPattern::new(&name))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidKnownThirdParty)?
.unwrap_or_default();
let known_local_folder = options
.known_local_folder
.map(|names| {
names
.into_iter()
.map(|name| IdentifierPattern::new(&name))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidKnownLocalFolder)?
.unwrap_or_default();
let extra_standard_library = options
.extra_standard_library
.map(|names| {
names
.into_iter()
.map(|name| IdentifierPattern::new(&name))
.collect()
})
.transpose()
.map_err(SettingsError::InvalidExtraStandardLibrary)?
.unwrap_or_default();
let no_lines_before = options.no_lines_before.unwrap_or_default();
let sections = options.sections.unwrap_or_default();
// Verify that `sections` doesn't contain any built-in sections.
let sections: FxHashMap<String, Vec<glob::Pattern>> = sections
.into_iter()
.filter_map(|(section, modules)| match section {
ImportSection::Known(section) => {
warn_user_once!("`sections` contains built-in section: `{:?}`", section);
None
}
ImportSection::UserDefined(section) => Some((section, modules)),
})
.map(|(section, modules)| {
let modules = modules
.into_iter()
.map(|module| {
IdentifierPattern::new(&module)
.map_err(SettingsError::InvalidUserDefinedSection)
})
.collect::<Result<Vec<_>, Self::Error>>()?;
Ok((section, modules))
})
.collect::<Result<_, _>>()?;
// Verify that `section_order` doesn't contain any duplicates.
let mut seen =
FxHashSet::with_capacity_and_hasher(section_order.len(), BuildHasherDefault::default());
for section in &section_order {
if !seen.insert(section) {
warn_user_once!(
"`section-order` contains duplicate section: `{:?}`",
section
);
}
}
// Verify that all sections listed in `section_order` are defined in `sections`.
for section in &section_order {
if let ImportSection::UserDefined(section_name) = section {
if !sections.contains_key(section_name) {
warn_user_once!("`section-order` contains unknown section: `{:?}`", section,);
}
}
}
// Verify that all sections listed in `no_lines_before` are defined in `sections`.
for section in &no_lines_before {
if let ImportSection::UserDefined(section_name) = section {
if !sections.contains_key(section_name) {
warn_user_once!(
"`no-lines-before` contains unknown section: `{:?}`",
section,
);
}
}
}
// Add all built-in sections to `section_order`, if not already present.
for section in ImportType::iter().map(ImportSection::Known) {
if !section_order.contains(&section) {
warn_user_once!(
"`section-order` is missing built-in section: `{:?}`",
section
);
section_order.push(section);
}
}
// Add all user-defined sections to `section-order`, if not already present.
for section_name in sections.keys() {
let section = ImportSection::UserDefined(section_name.clone());
if !section_order.contains(&section) {
warn_user_once!("`section-order` is missing section: `{:?}`", section);
section_order.push(section);
}
}
Ok(Self {
required_imports: BTreeSet::from_iter(options.required_imports.unwrap_or_default()),
combine_as_imports: options.combine_as_imports.unwrap_or(false),
force_single_line: options.force_single_line.unwrap_or(false),
force_sort_within_sections: options.force_sort_within_sections.unwrap_or(false),
case_sensitive: options.case_sensitive.unwrap_or(false),
force_wrap_aliases: options.force_wrap_aliases.unwrap_or(false),
detect_same_package: options.detect_same_package.unwrap_or(true),
force_to_top: BTreeSet::from_iter(options.force_to_top.unwrap_or_default()),
known_modules: KnownModules::new(
known_first_party,
known_third_party,
known_local_folder,
extra_standard_library,
sections,
),
order_by_type: options.order_by_type.unwrap_or(true),
relative_imports_order: options.relative_imports_order.unwrap_or_default(),
single_line_exclusions: BTreeSet::from_iter(
options.single_line_exclusions.unwrap_or_default(),
),
split_on_trailing_comma: options.split_on_trailing_comma.unwrap_or(true),
classes: BTreeSet::from_iter(options.classes.unwrap_or_default()),
constants: BTreeSet::from_iter(options.constants.unwrap_or_default()),
variables: BTreeSet::from_iter(options.variables.unwrap_or_default()),
no_lines_before: BTreeSet::from_iter(no_lines_before),
lines_after_imports: options.lines_after_imports.unwrap_or(-1),
lines_between_types: options.lines_between_types.unwrap_or_default(),
forced_separate: Vec::from_iter(options.forced_separate.unwrap_or_default()),
section_order,
})
}
}
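The section-order validation inside this `TryFrom` is easy to miss among the pattern parsing. A stripped-down sketch of just the duplicate check, using the standard-library `HashSet` instead of `FxHashSet` and returning the duplicates rather than calling `warn_user_once!` (the section names are only illustrative):

```rust
use std::collections::HashSet;

/// Return the sections that appear more than once in `section_order`,
/// mirroring the duplicate check in `TryFrom<Options> for Settings` above.
fn duplicate_sections<'a>(section_order: &[&'a str]) -> Vec<&'a str> {
    let mut seen = HashSet::with_capacity(section_order.len());
    let mut duplicates = Vec::new();
    for section in section_order {
        if !seen.insert(*section) {
            duplicates.push(*section);
        }
    }
    duplicates
}

fn main() {
    let order = ["future", "standard-library", "third-party", "future"];
    // The real code would emit a `warn_user_once!` for the repeated section.
    assert_eq!(duplicate_sections(&order), ["future"]);
}
```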
/// Error returned by the [`TryFrom`] implementation of [`Settings`].
#[derive(Debug)]
pub enum SettingsError {
@@ -596,71 +129,3 @@ impl Error for SettingsError {
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
required_imports: Some(settings.required_imports.into_iter().collect()),
combine_as_imports: Some(settings.combine_as_imports),
extra_standard_library: Some(
settings
.known_modules
.modules_for_known_type(ImportType::StandardLibrary)
.map(ToString::to_string)
.collect(),
),
force_single_line: Some(settings.force_single_line),
force_sort_within_sections: Some(settings.force_sort_within_sections),
case_sensitive: Some(settings.case_sensitive),
force_wrap_aliases: Some(settings.force_wrap_aliases),
detect_same_package: Some(settings.detect_same_package),
force_to_top: Some(settings.force_to_top.into_iter().collect()),
known_first_party: Some(
settings
.known_modules
.modules_for_known_type(ImportType::FirstParty)
.map(ToString::to_string)
.collect(),
),
known_third_party: Some(
settings
.known_modules
.modules_for_known_type(ImportType::ThirdParty)
.map(ToString::to_string)
.collect(),
),
known_local_folder: Some(
settings
.known_modules
.modules_for_known_type(ImportType::LocalFolder)
.map(ToString::to_string)
.collect(),
),
order_by_type: Some(settings.order_by_type),
relative_imports_order: Some(settings.relative_imports_order),
single_line_exclusions: Some(settings.single_line_exclusions.into_iter().collect()),
split_on_trailing_comma: Some(settings.split_on_trailing_comma),
classes: Some(settings.classes.into_iter().collect()),
constants: Some(settings.constants.into_iter().collect()),
variables: Some(settings.variables.into_iter().collect()),
no_lines_before: Some(settings.no_lines_before.into_iter().collect()),
lines_after_imports: Some(settings.lines_after_imports),
lines_between_types: Some(settings.lines_between_types),
forced_separate: Some(settings.forced_separate.into_iter().collect()),
section_order: Some(settings.section_order.into_iter().collect()),
sections: Some(
settings
.known_modules
.user_defined()
.into_iter()
.map(|(section, modules)| {
(
ImportSection::UserDefined(section.to_string()),
modules.into_iter().map(ToString::to_string).collect(),
)
})
.collect(),
),
}
}
}

View file

@@ -1,37 +1,13 @@
//! Settings for the `mccabe` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "McCabeOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "10",
value_type = "int",
example = r#"
# Flag errors (`C901`) whenever the complexity level exceeds 5.
max-complexity = 5
"#
)]
/// The maximum McCabe complexity to allow before triggering `C901` errors.
pub max_complexity: Option<usize>,
}
use ruff_macros::CacheKey;
#[derive(Debug, CacheKey)]
pub struct Settings {
pub max_complexity: usize,
}
const DEFAULT_MAX_COMPLEXITY: usize = 10;
pub const DEFAULT_MAX_COMPLEXITY: usize = 10;
impl Default for Settings {
fn default() -> Self {
@@ -40,19 +16,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
max_complexity: options.max_complexity.unwrap_or(DEFAULT_MAX_COMPLEXITY),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
max_complexity: Some(settings.max_complexity),
}
}
}

View file

@@ -3,71 +3,10 @@
use std::error::Error;
use std::fmt;
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
use crate::settings::types::IdentifierPattern;
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "Pep8NamingOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"["setUp", "tearDown", "setUpClass", "tearDownClass", "setUpModule", "tearDownModule", "asyncSetUp", "asyncTearDown", "setUpTestData", "failureException", "longMessage", "maxDiff"]"#,
value_type = "list[str]",
example = r#"
ignore-names = ["callMethod"]
"#
)]
/// A list of names (or patterns) to ignore when considering `pep8-naming` violations.
pub ignore_names: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"extend-ignore-names = ["callMethod"]"#
)]
/// Additional names (or patterns) to ignore when considering `pep8-naming` violations,
/// in addition to those included in `ignore-names`.
pub extend_ignore_names: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
# Allow Pydantic's `@validator` decorator to trigger class method treatment.
classmethod-decorators = ["pydantic.validator"]
"#
)]
/// A list of decorators that, when applied to a method, indicate that the
/// method should be treated as a class method (in addition to the builtin
/// `@classmethod`).
///
/// For example, Ruff will expect that any method decorated by a decorator
/// in this list takes a `cls` argument as its first argument.
pub classmethod_decorators: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
# Allow a shorthand alias, `@stcmthd`, to trigger static method treatment.
staticmethod-decorators = ["stcmthd"]
"#
)]
/// A list of decorators that, when applied to a method, indicate that the
/// method should be treated as a static method (in addition to the builtin
/// `@staticmethod`).
///
/// For example, Ruff will expect that any method decorated by a decorator
/// in this list has no `self` or `cls` argument.
pub staticmethod_decorators: Option<Vec<String>>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub ignore_names: Vec<IdentifierPattern>,
@@ -75,7 +14,7 @@ pub struct Settings {
pub staticmethod_decorators: Vec<String>,
}
fn default_ignore_names() -> Vec<String> {
pub fn default_ignore_names() -> Vec<String> {
vec![
"setUp".to_string(),
"tearDown".to_string(),
@@ -105,24 +44,6 @@ impl Default for Settings {
}
}
impl TryFrom<Options> for Settings {
type Error = SettingsError;
fn try_from(options: Options) -> Result<Self, Self::Error> {
Ok(Self {
ignore_names: options
.ignore_names
.unwrap_or_else(default_ignore_names)
.into_iter()
.chain(options.extend_ignore_names.unwrap_or_default())
.map(|name| IdentifierPattern::new(&name).map_err(SettingsError::InvalidIgnoreName))
.collect::<Result<Vec<_>, Self::Error>>()?,
classmethod_decorators: options.classmethod_decorators.unwrap_or_default(),
staticmethod_decorators: options.staticmethod_decorators.unwrap_or_default(),
})
}
}
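The merging of `ignore-names` and `extend-ignore-names` above is worth calling out: the former replaces the default list wholesale, the latter only appends to it. A sketch of that behaviour with plain strings in place of `IdentifierPattern` (the two default names below are an abbreviation of `default_ignore_names`):

```rust
/// Resolve the effective ignore list the way the `TryFrom` above does,
/// minus the conversion into `IdentifierPattern`s.
fn resolve_ignore_names(
    ignore_names: Option<Vec<String>>,
    extend_ignore_names: Option<Vec<String>>,
) -> Vec<String> {
    ignore_names
        // `ignore-names` replaces the default list entirely...
        .unwrap_or_else(|| vec!["setUp".to_string(), "tearDown".to_string()])
        .into_iter()
        // ...while `extend-ignore-names` is appended on top of it.
        .chain(extend_ignore_names.unwrap_or_default())
        .collect()
}

fn main() {
    let names = resolve_ignore_names(None, Some(vec!["callMethod".to_string()]));
    assert_eq!(names, ["setUp", "tearDown", "callMethod"]);
}
```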
/// Error returned by the [`TryFrom`] implementation of [`Settings`].
#[derive(Debug)]
pub enum SettingsError {
@@ -146,20 +67,3 @@ impl Error for SettingsError {
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
ignore_names: Some(
settings
.ignore_names
.into_iter()
.map(|pattern| pattern.as_str().to_owned())
.collect(),
),
extend_ignore_names: None,
classmethod_decorators: Some(settings.classmethod_decorators),
staticmethod_decorators: Some(settings.staticmethod_decorators),
}
}
}

View file

@@ -1,65 +1,11 @@
//! Settings for the `pycodestyle` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
use crate::line_width::LineLength;
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(deny_unknown_fields, rename_all = "kebab-case", rename = "Pycodestyle")]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = "None",
value_type = "int",
example = r#"
max-doc-length = 88
"#
)]
/// The maximum line length to allow for line-length violations within
/// documentation (`W505`), including standalone comments. By default,
/// this is set to null, which disables reporting violations.
///
/// See the [`doc-line-too-long`](https://beta.ruff.rs/docs/rules/doc-line-too-long/) rule for more information.
pub max_doc_length: Option<LineLength>,
#[option(
default = "false",
value_type = "bool",
example = r#"
ignore-overlong-task-comments = true
"#
)]
/// Whether line-length violations (`E501`) should be triggered for
/// comments starting with `task-tags` (by default: \["TODO", "FIXME",
/// and "XXX"\]).
pub ignore_overlong_task_comments: Option<bool>,
}
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub max_doc_length: Option<LineLength>,
pub ignore_overlong_task_comments: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
max_doc_length: options.max_doc_length,
ignore_overlong_task_comments: options
.ignore_overlong_task_comments
.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
max_doc_length: settings.max_doc_length,
ignore_overlong_task_comments: Some(settings.ignore_overlong_task_comments),
}
}
}

View file

@@ -4,7 +4,7 @@ use std::collections::BTreeSet;
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
use crate::registry::Rule;
@@ -71,100 +71,9 @@ impl Convention {
}
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(deny_unknown_fields, rename_all = "kebab-case", rename = "Pydocstyle")]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"None"#,
value_type = r#""google" | "numpy" | "pep257""#,
example = r#"
# Use Google-style docstrings.
convention = "google"
"#
)]
/// Whether to use Google-style or NumPy-style conventions or the PEP257
/// defaults when analyzing docstring sections.
///
/// Enabling a convention will force-disable any rules that are not
/// included in the specified convention. As such, the intended use is
/// to enable a convention and then selectively disable any additional
/// rules on top of it.
///
/// For example, to use Google-style conventions but avoid requiring
/// documentation for every function parameter:
///
/// ```toml
/// [tool.ruff]
/// # Enable all `pydocstyle` rules, limiting to those that adhere to the
/// # Google convention via `convention = "google"`, below.
/// select = ["D"]
///
/// # On top of the Google convention, disable `D417`, which requires
/// # documentation for every function parameter.
/// ignore = ["D417"]
///
/// [tool.ruff.pydocstyle]
/// convention = "google"
/// ```
///
/// As conventions force-disable all rules not included in the convention,
/// enabling _additional_ rules on top of a convention is currently
/// unsupported.
pub convention: Option<Convention>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
ignore-decorators = ["typing.overload"]
"#
)]
/// Ignore docstrings for functions or methods decorated with the
/// specified fully-qualified decorators.
pub ignore_decorators: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
property-decorators = ["gi.repository.GObject.Property"]
"#
)]
/// A list of decorators that, when applied to a method, indicate that the
/// method should be treated as a property (in addition to the builtin
/// `@property` and standard-library `@functools.cached_property`).
///
/// For example, Ruff will expect that any method decorated by a decorator
/// in this list can use a non-imperative summary line.
pub property_decorators: Option<Vec<String>>,
}
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub convention: Option<Convention>,
pub ignore_decorators: BTreeSet<String>,
pub property_decorators: BTreeSet<String>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
convention: options.convention,
ignore_decorators: BTreeSet::from_iter(options.ignore_decorators.unwrap_or_default()),
property_decorators: BTreeSet::from_iter(
options.property_decorators.unwrap_or_default(),
),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
convention: settings.convention,
ignore_decorators: Some(settings.ignore_decorators.into_iter().collect()),
property_decorators: Some(settings.property_decorators.into_iter().collect()),
}
}
}

View file

@@ -1,50 +1,8 @@
//! Settings for the `Pyflakes` plugin.
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
#[derive(
Debug, PartialEq, Eq, Default, Serialize, Deserialize, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "PyflakesOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = "extend-generics = [\"django.db.models.ForeignKey\"]"
)]
/// Additional functions or classes to consider generic, such that any
/// subscripts should be treated as a type annotation (e.g., `ForeignKey` in
/// `django.db.models.ForeignKey["User"]`).
///
/// Expects to receive a list of fully-qualified names (e.g., `django.db.models.ForeignKey`,
/// rather than `ForeignKey`).
pub extend_generics: Option<Vec<String>>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub extend_generics: Vec<String>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
extend_generics: options.extend_generics.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
extend_generics: Some(settings.extend_generics),
}
}
}

View file

@@ -1,10 +1,10 @@
//! Settings for the `pylint` plugin.
use anyhow::anyhow;
use ruff_python_ast::Constant;
use serde::{Deserialize, Serialize};
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use ruff_macros::CacheKey;
use ruff_python_ast::Constant;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
@@ -35,43 +35,6 @@ impl TryFrom<&Constant> for ConstantType {
}
}
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "PylintOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"["str", "bytes"]"#,
value_type = r#"list["str" | "bytes" | "complex" | "float" | "int" | "tuple"]"#,
example = r#"
allow-magic-value-types = ["int"]
"#
)]
/// Constant types to ignore when used as "magic values" (see: `PLR2004`).
pub allow_magic_value_types: Option<Vec<ConstantType>>,
#[option(default = r"12", value_type = "int", example = r"max-branches = 12")]
/// Maximum number of branches allowed for a function or method body (see:
/// `PLR0912`).
pub max_branches: Option<usize>,
#[option(default = r"6", value_type = "int", example = r"max-returns = 6")]
/// Maximum number of return statements allowed for a function or method
/// body (see `PLR0911`).
pub max_returns: Option<usize>,
#[option(default = r"5", value_type = "int", example = r"max-args = 5")]
/// Maximum number of arguments allowed for a function or method definition
/// (see: `PLR0913`).
pub max_args: Option<usize>,
#[option(default = r"50", value_type = "int", example = r"max-statements = 50")]
/// Maximum number of statements allowed for a function or method body (see:
/// `PLR0915`).
pub max_statements: Option<usize>,
}
#[derive(Debug, CacheKey)]
pub struct Settings {
pub allow_magic_value_types: Vec<ConstantType>,
@@ -92,30 +55,3 @@ impl Default for Settings {
}
}
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
let defaults = Settings::default();
Self {
allow_magic_value_types: options
.allow_magic_value_types
.unwrap_or(defaults.allow_magic_value_types),
max_args: options.max_args.unwrap_or(defaults.max_args),
max_returns: options.max_returns.unwrap_or(defaults.max_returns),
max_branches: options.max_branches.unwrap_or(defaults.max_branches),
max_statements: options.max_statements.unwrap_or(defaults.max_statements),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
allow_magic_value_types: Some(settings.allow_magic_value_types),
max_args: Some(settings.max_args),
max_returns: Some(settings.max_returns),
max_branches: Some(settings.max_branches),
max_statements: Some(settings.max_statements),
}
}
}
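Unlike most of the plugins above, the pylint conversion pulls its fallbacks from `Settings::default()` rather than from per-field constants. A reduced sketch of that variant, keeping only two fields and using the default limits stated in the doc comments above (5 arguments, 12 branches):

```rust
#[derive(Debug, PartialEq)]
struct Settings {
    max_args: usize,
    max_branches: usize,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            max_args: 5,
            max_branches: 12,
        }
    }
}

#[derive(Debug, Default)]
struct Options {
    max_args: Option<usize>,
    max_branches: Option<usize>,
}

impl From<Options> for Settings {
    fn from(options: Options) -> Self {
        // Fall back to the `Default` implementation field by field.
        let defaults = Settings::default();
        Self {
            max_args: options.max_args.unwrap_or(defaults.max_args),
            max_branches: options.max_branches.unwrap_or(defaults.max_branches),
        }
    }
}

fn main() {
    let settings = Settings::from(Options {
        max_args: Some(8),
        ..Options::default()
    });
    assert_eq!(
        settings,
        Settings {
            max_args: 8,
            max_branches: 12
        }
    );
}
```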

View file

@@ -1,76 +1,8 @@
//! Settings for the `pyupgrade` plugin.
use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
use serde::{Deserialize, Serialize};
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, CombineOptions,
)]
#[serde(
deny_unknown_fields,
rename_all = "kebab-case",
rename = "PyUpgradeOptions"
)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
# Preserve types, even if a file imports `from __future__ import annotations`.
keep-runtime-typing = true
"#
)]
/// Whether to avoid PEP 585 (`List[int]` -> `list[int]`) and PEP 604
/// (`Union[str, int]` -> `str | int`) rewrites even if a file imports
/// `from __future__ import annotations`.
///
/// This setting is only applicable when the target Python version is below
/// 3.9 and 3.10 respectively, and is most commonly used when working with
/// libraries like Pydantic and FastAPI, which rely on the ability to parse
/// type annotations at runtime. The use of `from __future__ import annotations`
/// causes Python to treat the type annotations as strings, which typically
/// allows for the use of language features that appear in later Python
/// versions but are not yet supported by the current version (e.g., `str |
/// int`). However, libraries that rely on runtime type annotations will
/// break if the annotations are incompatible with the current Python
/// version.
///
/// For example, while the following is valid Python 3.8 code due to the
/// presence of `from __future__ import annotations`, the use of `str | int`
/// prior to Python 3.10 will cause Pydantic to raise a `TypeError` at
/// runtime:
///
/// ```python
/// from __future__ import annotations
///
/// import pydantic
///
/// class Foo(pydantic.BaseModel):
/// bar: str | int
/// ```
///
pub keep_runtime_typing: Option<bool>,
}
use ruff_macros::CacheKey;
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
pub keep_runtime_typing: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
keep_runtime_typing: options.keep_runtime_typing.unwrap_or_default(),
}
}
}
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
keep_runtime_typing: Some(settings.keep_runtime_typing),
}
}
}

View file

@@ -1,381 +0,0 @@
//! User-provided program settings, taking into account pyproject.toml and
//! command-line options. Structure mirrors the user-facing representation of
//! the various parameters.
use std::borrow::Cow;
use std::env::VarError;
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
use glob::{glob, GlobError, Paths, PatternError};
use regex::Regex;
use shellexpand;
use shellexpand::LookupError;
use crate::fs;
use crate::line_width::{LineLength, TabSize};
use crate::rule_selector::RuleSelector;
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_comprehensions,
flake8_copyright, flake8_errmsg, flake8_gettext, flake8_implicit_str_concat,
flake8_import_conventions, flake8_pytest_style, flake8_quotes, flake8_self,
flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments, isort, mccabe, pep8_naming,
pycodestyle, pydocstyle, pyflakes, pylint, pyupgrade,
};
use crate::settings::options::Options;
use crate::settings::types::{
FilePattern, PerFileIgnore, PythonVersion, SerializationFormat, Version,
};
#[derive(Debug, Default)]
pub struct RuleSelection {
pub select: Option<Vec<RuleSelector>>,
pub ignore: Vec<RuleSelector>,
pub extend_select: Vec<RuleSelector>,
pub fixable: Option<Vec<RuleSelector>>,
pub unfixable: Vec<RuleSelector>,
pub extend_fixable: Vec<RuleSelector>,
}
#[derive(Debug, Default)]
pub struct Configuration {
pub rule_selections: Vec<RuleSelection>,
pub per_file_ignores: Option<Vec<PerFileIgnore>>,
pub allowed_confusables: Option<Vec<char>>,
pub builtins: Option<Vec<String>>,
pub cache_dir: Option<PathBuf>,
pub dummy_variable_rgx: Option<Regex>,
pub exclude: Option<Vec<FilePattern>>,
pub extend: Option<PathBuf>,
pub extend_exclude: Vec<FilePattern>,
pub extend_include: Vec<FilePattern>,
pub extend_per_file_ignores: Vec<PerFileIgnore>,
pub external: Option<Vec<String>>,
pub fix: Option<bool>,
pub fix_only: Option<bool>,
pub force_exclude: Option<bool>,
pub format: Option<SerializationFormat>,
pub ignore_init_module_imports: Option<bool>,
pub include: Option<Vec<FilePattern>>,
pub line_length: Option<LineLength>,
pub logger_objects: Option<Vec<String>>,
pub namespace_packages: Option<Vec<PathBuf>>,
pub required_version: Option<Version>,
pub respect_gitignore: Option<bool>,
pub show_fixes: Option<bool>,
pub show_source: Option<bool>,
pub src: Option<Vec<PathBuf>>,
pub tab_size: Option<TabSize>,
pub target_version: Option<PythonVersion>,
pub task_tags: Option<Vec<String>>,
pub typing_modules: Option<Vec<String>>,
// Plugins
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
pub flake8_bandit: Option<flake8_bandit::settings::Options>,
pub flake8_bugbear: Option<flake8_bugbear::settings::Options>,
pub flake8_builtins: Option<flake8_builtins::settings::Options>,
pub flake8_comprehensions: Option<flake8_comprehensions::settings::Options>,
pub flake8_copyright: Option<flake8_copyright::settings::Options>,
pub flake8_errmsg: Option<flake8_errmsg::settings::Options>,
pub flake8_gettext: Option<flake8_gettext::settings::Options>,
pub flake8_implicit_str_concat: Option<flake8_implicit_str_concat::settings::Options>,
pub flake8_import_conventions: Option<flake8_import_conventions::settings::Options>,
pub flake8_pytest_style: Option<flake8_pytest_style::settings::Options>,
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
pub flake8_self: Option<flake8_self::settings::Options>,
pub flake8_tidy_imports: Option<flake8_tidy_imports::options::Options>,
pub flake8_type_checking: Option<flake8_type_checking::settings::Options>,
pub flake8_unused_arguments: Option<flake8_unused_arguments::settings::Options>,
pub isort: Option<isort::settings::Options>,
pub mccabe: Option<mccabe::settings::Options>,
pub pep8_naming: Option<pep8_naming::settings::Options>,
pub pycodestyle: Option<pycodestyle::settings::Options>,
pub pydocstyle: Option<pydocstyle::settings::Options>,
pub pyflakes: Option<pyflakes::settings::Options>,
pub pylint: Option<pylint::settings::Options>,
pub pyupgrade: Option<pyupgrade::settings::Options>,
}
impl Configuration {
pub fn from_options(options: Options, project_root: &Path) -> Result<Self> {
Ok(Self {
rule_selections: vec![RuleSelection {
select: options.select,
ignore: options
.ignore
.into_iter()
.flatten()
.chain(options.extend_ignore.into_iter().flatten())
.collect(),
extend_select: options.extend_select.unwrap_or_default(),
fixable: options.fixable,
unfixable: options
.unfixable
.into_iter()
.flatten()
.chain(options.extend_unfixable.into_iter().flatten())
.collect(),
extend_fixable: options.extend_fixable.unwrap_or_default(),
}],
allowed_confusables: options.allowed_confusables,
builtins: options.builtins,
cache_dir: options
.cache_dir
.map(|dir| {
let dir = shellexpand::full(&dir);
dir.map(|dir| PathBuf::from(dir.as_ref()))
})
.transpose()
.map_err(|e| anyhow!("Invalid `cache-dir` value: {e}"))?,
dummy_variable_rgx: options
.dummy_variable_rgx
.map(|pattern| Regex::new(&pattern))
.transpose()
.map_err(|e| anyhow!("Invalid `dummy-variable-rgx` value: {e}"))?,
exclude: options.exclude.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
}),
extend: options
.extend
.map(|extend| {
let extend = shellexpand::full(&extend);
extend.map(|extend| PathBuf::from(extend.as_ref()))
})
.transpose()
.map_err(|e| anyhow!("Invalid `extend` value: {e}"))?,
extend_exclude: options
.extend_exclude
.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
})
.unwrap_or_default(),
extend_include: options
.extend_include
.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
})
.unwrap_or_default(),
extend_per_file_ignores: options
.extend_per_file_ignores
.map(|per_file_ignores| {
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| {
PerFileIgnore::new(pattern, &prefixes, Some(project_root))
})
.collect()
})
.unwrap_or_default(),
external: options.external,
fix: options.fix,
fix_only: options.fix_only,
format: options.format,
force_exclude: options.force_exclude,
ignore_init_module_imports: options.ignore_init_module_imports,
include: options.include.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
}),
line_length: options.line_length,
tab_size: options.tab_size,
namespace_packages: options
.namespace_packages
.map(|namespace_package| resolve_src(&namespace_package, project_root))
.transpose()?,
per_file_ignores: options.per_file_ignores.map(|per_file_ignores| {
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| {
PerFileIgnore::new(pattern, &prefixes, Some(project_root))
})
.collect()
}),
required_version: options.required_version,
respect_gitignore: options.respect_gitignore,
show_source: options.show_source,
show_fixes: options.show_fixes,
src: options
.src
.map(|src| resolve_src(&src, project_root))
.transpose()?,
target_version: options.target_version,
task_tags: options.task_tags,
logger_objects: options.logger_objects,
typing_modules: options.typing_modules,
// Plugins
flake8_annotations: options.flake8_annotations,
flake8_bandit: options.flake8_bandit,
flake8_bugbear: options.flake8_bugbear,
flake8_builtins: options.flake8_builtins,
flake8_comprehensions: options.flake8_comprehensions,
flake8_copyright: options.flake8_copyright,
flake8_errmsg: options.flake8_errmsg,
flake8_gettext: options.flake8_gettext,
flake8_implicit_str_concat: options.flake8_implicit_str_concat,
flake8_import_conventions: options.flake8_import_conventions,
flake8_pytest_style: options.flake8_pytest_style,
flake8_quotes: options.flake8_quotes,
flake8_self: options.flake8_self,
flake8_tidy_imports: options.flake8_tidy_imports,
flake8_type_checking: options.flake8_type_checking,
flake8_unused_arguments: options.flake8_unused_arguments,
isort: options.isort,
mccabe: options.mccabe,
pep8_naming: options.pep8_naming,
pycodestyle: options.pycodestyle,
pydocstyle: options.pydocstyle,
pyflakes: options.pyflakes,
pylint: options.pylint,
pyupgrade: options.pyupgrade,
})
}
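One small pattern in `from_options` above is used for both `cache-dir` and `extend`: map the optional string through `shellexpand`, then `transpose` so a single `map_err` covers both the "unset" and the "set but invalid" cases. A sketch of that step in isolation, assuming the same `shellexpand` and `anyhow` dependencies this code already uses:

```rust
use std::path::PathBuf;

use anyhow::{anyhow, Result};

/// Expand `~` and environment variables in an optional `cache-dir` value,
/// as in `Configuration::from_options` above.
fn resolve_cache_dir(cache_dir: Option<String>) -> Result<Option<PathBuf>> {
    cache_dir
        .map(|dir| shellexpand::full(&dir).map(|dir| PathBuf::from(dir.as_ref())))
        .transpose()
        .map_err(|e| anyhow!("Invalid `cache-dir` value: {e}"))
}

fn main() -> Result<()> {
    // Unset stays unset; a set value is expanded and converted to a path.
    assert_eq!(resolve_cache_dir(None)?, None);
    assert!(resolve_cache_dir(Some("./.ruff_cache".to_string()))?.is_some());
    Ok(())
}
```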
#[must_use]
pub fn combine(self, config: Self) -> Self {
Self {
rule_selections: config
.rule_selections
.into_iter()
.chain(self.rule_selections)
.collect(),
allowed_confusables: self.allowed_confusables.or(config.allowed_confusables),
builtins: self.builtins.or(config.builtins),
cache_dir: self.cache_dir.or(config.cache_dir),
dummy_variable_rgx: self.dummy_variable_rgx.or(config.dummy_variable_rgx),
exclude: self.exclude.or(config.exclude),
extend: self.extend.or(config.extend),
extend_exclude: config
.extend_exclude
.into_iter()
.chain(self.extend_exclude)
.collect(),
extend_include: config
.extend_include
.into_iter()
.chain(self.extend_include)
.collect(),
extend_per_file_ignores: config
.extend_per_file_ignores
.into_iter()
.chain(self.extend_per_file_ignores)
.collect(),
external: self.external.or(config.external),
fix: self.fix.or(config.fix),
fix_only: self.fix_only.or(config.fix_only),
format: self.format.or(config.format),
force_exclude: self.force_exclude.or(config.force_exclude),
include: self.include.or(config.include),
ignore_init_module_imports: self
.ignore_init_module_imports
.or(config.ignore_init_module_imports),
line_length: self.line_length.or(config.line_length),
logger_objects: self.logger_objects.or(config.logger_objects),
tab_size: self.tab_size.or(config.tab_size),
namespace_packages: self.namespace_packages.or(config.namespace_packages),
per_file_ignores: self.per_file_ignores.or(config.per_file_ignores),
required_version: self.required_version.or(config.required_version),
respect_gitignore: self.respect_gitignore.or(config.respect_gitignore),
show_source: self.show_source.or(config.show_source),
show_fixes: self.show_fixes.or(config.show_fixes),
src: self.src.or(config.src),
target_version: self.target_version.or(config.target_version),
task_tags: self.task_tags.or(config.task_tags),
typing_modules: self.typing_modules.or(config.typing_modules),
// Plugins
flake8_annotations: self.flake8_annotations.combine(config.flake8_annotations),
flake8_bandit: self.flake8_bandit.combine(config.flake8_bandit),
flake8_bugbear: self.flake8_bugbear.combine(config.flake8_bugbear),
flake8_builtins: self.flake8_builtins.combine(config.flake8_builtins),
flake8_comprehensions: self
.flake8_comprehensions
.combine(config.flake8_comprehensions),
flake8_copyright: self.flake8_copyright.combine(config.flake8_copyright),
flake8_errmsg: self.flake8_errmsg.combine(config.flake8_errmsg),
flake8_gettext: self.flake8_gettext.combine(config.flake8_gettext),
flake8_implicit_str_concat: self
.flake8_implicit_str_concat
.combine(config.flake8_implicit_str_concat),
flake8_import_conventions: self
.flake8_import_conventions
.combine(config.flake8_import_conventions),
flake8_pytest_style: self.flake8_pytest_style.combine(config.flake8_pytest_style),
flake8_quotes: self.flake8_quotes.combine(config.flake8_quotes),
flake8_self: self.flake8_self.combine(config.flake8_self),
flake8_tidy_imports: self.flake8_tidy_imports.combine(config.flake8_tidy_imports),
flake8_type_checking: self
.flake8_type_checking
.combine(config.flake8_type_checking),
flake8_unused_arguments: self
.flake8_unused_arguments
.combine(config.flake8_unused_arguments),
isort: self.isort.combine(config.isort),
mccabe: self.mccabe.combine(config.mccabe),
pep8_naming: self.pep8_naming.combine(config.pep8_naming),
pycodestyle: self.pycodestyle.combine(config.pycodestyle),
pydocstyle: self.pydocstyle.combine(config.pydocstyle),
pyflakes: self.pyflakes.combine(config.pyflakes),
pylint: self.pylint.combine(config.pylint),
pyupgrade: self.pyupgrade.combine(config.pyupgrade),
}
}
}
pub trait CombinePluginOptions {
#[must_use]
fn combine(self, other: Self) -> Self;
}
impl<T: CombinePluginOptions> CombinePluginOptions for Option<T> {
fn combine(self, other: Self) -> Self {
match (self, other) {
(Some(base), Some(other)) => Some(base.combine(other)),
(Some(base), None) => Some(base),
(None, Some(other)) => Some(other),
(None, None) => None,
}
}
}
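`CombinePluginOptions`, together with its blanket `Option<T>` impl above, is what lets per-plugin options merge across configuration layers in `combine`. A self-contained sketch with a toy options type; `McCabeOptions` here is a stand-in, and in the real code the field-wise `or` comes from the `CombineOptions` derive rather than being written by hand:

```rust
// Sketch of the `CombinePluginOptions` merge behaviour defined above.
trait CombinePluginOptions {
    #[must_use]
    fn combine(self, other: Self) -> Self;
}

#[derive(Debug, PartialEq)]
struct McCabeOptions {
    max_complexity: Option<usize>,
}

impl CombinePluginOptions for McCabeOptions {
    fn combine(self, other: Self) -> Self {
        Self {
            // The more specific configuration (`self`) wins.
            max_complexity: self.max_complexity.or(other.max_complexity),
        }
    }
}

impl<T: CombinePluginOptions> CombinePluginOptions for Option<T> {
    fn combine(self, other: Self) -> Self {
        match (self, other) {
            (Some(base), Some(other)) => Some(base.combine(other)),
            (Some(base), None) => Some(base),
            (None, Some(other)) => Some(other),
            (None, None) => None,
        }
    }
}

fn main() {
    let child = Some(McCabeOptions {
        max_complexity: Some(5),
    });
    let parent = Some(McCabeOptions {
        max_complexity: Some(10),
    });
    // As in `Configuration::combine` above: the child configuration takes
    // precedence over the configuration it extends.
    assert_eq!(
        child.combine(parent),
        Some(McCabeOptions {
            max_complexity: Some(5)
        })
    );
}
```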
/// Given a list of source paths, which could include glob patterns, resolve the
/// matching paths.
pub fn resolve_src(src: &[String], project_root: &Path) -> Result<Vec<PathBuf>> {
let expansions = src
.iter()
.map(shellexpand::full)
.collect::<Result<Vec<Cow<'_, str>>, LookupError<VarError>>>()?;
let globs = expansions
.iter()
.map(|path| Path::new(path.as_ref()))
.map(|path| fs::normalize_path_to(path, project_root))
.map(|path| glob(&path.to_string_lossy()))
.collect::<Result<Vec<Paths>, PatternError>>()?;
let paths: Vec<PathBuf> = globs
.into_iter()
.flatten()
.collect::<Result<Vec<PathBuf>, GlobError>>()?;
Ok(paths)
}
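`resolve_src` chains three fallible steps: shell expansion, glob compilation, and glob iteration. A cut-down sketch of the same flow for a single pattern, assuming the `glob` and `shellexpand` crates this file already depends on, and skipping the `normalize_path_to` step since it needs the project root:

```rust
use std::error::Error;
use std::path::PathBuf;

/// Resolve one `src` entry: expand `~`/env vars, then expand the glob.
fn resolve_one(src: &str) -> Result<Vec<PathBuf>, Box<dyn Error>> {
    let expanded = shellexpand::full(src)?;
    let paths = glob::glob(expanded.as_ref())?.collect::<Result<Vec<_>, _>>()?;
    Ok(paths)
}

fn main() -> Result<(), Box<dyn Error>> {
    // e.g. match any TOML file in the current directory.
    let paths = resolve_one("./*.toml")?;
    println!("{paths:?}");
    Ok(())
}
```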

View file

@@ -2,19 +2,16 @@
//! command-line options. Structure is optimized for internal usage, as opposed
//! to external visibility or parsing.
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use anyhow::{anyhow, Result};
use anyhow::Result;
use globset::{Glob, GlobMatcher};
use regex::Regex;
use rustc_hash::{FxHashMap, FxHashSet};
use strum::IntoEnumIterator;
use rustc_hash::FxHashSet;
use ruff_cache::cache_dir;
use ruff_macros::CacheKey;
use crate::registry::{Rule, RuleNamespace, RuleSet, INCOMPATIBLE_CODES};
use crate::rule_selector::{RuleSelector, Specificity};
use crate::registry::{Rule, RuleSet};
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_comprehensions,
flake8_copyright, flake8_errmsg, flake8_gettext, flake8_implicit_str_concat,
@@ -22,49 +19,23 @@ use crate::rules::{
flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments, isort, mccabe, pep8_naming,
pycodestyle, pydocstyle, pyflakes, pylint, pyupgrade,
};
use crate::settings::configuration::Configuration;
use crate::settings::types::{FilePatternSet, PerFileIgnore, PythonVersion, SerializationFormat};
use crate::warn_user_once_by_id;
use self::rule_table::RuleTable;
use super::line_width::{LineLength, TabSize};
pub mod configuration;
use self::rule_table::RuleTable;
pub mod defaults;
pub mod flags;
pub mod options;
pub mod options_base;
pub mod pyproject;
pub mod rule_table;
pub mod types;
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
#[derive(Debug, Default)]
pub struct AllSettings {
pub cli: CliSettings,
pub lib: Settings,
}
impl AllSettings {
pub fn from_configuration(config: Configuration, project_root: &Path) -> Result<Self> {
Ok(Self {
cli: CliSettings {
cache_dir: config
.cache_dir
.clone()
.unwrap_or_else(|| cache_dir(project_root)),
fix: config.fix.unwrap_or(false),
fix_only: config.fix_only.unwrap_or(false),
format: config.format.unwrap_or_default(),
show_fixes: config.show_fixes.unwrap_or(false),
show_source: config.show_source.unwrap_or(false),
},
lib: Settings::from_configuration(config, project_root)?,
})
}
}
#[derive(Debug, Default, Clone)]
#[allow(clippy::struct_excessive_bools)]
/// Settings that are not used by this library and are only here so that `ruff_cli` can use them.
@@ -135,167 +106,6 @@ pub struct Settings {
}
impl Settings {
pub fn from_configuration(config: Configuration, project_root: &Path) -> Result<Self> {
if let Some(required_version) = &config.required_version {
if &**required_version != CARGO_PKG_VERSION {
return Err(anyhow!(
"Required version `{}` does not match the running version `{}`",
&**required_version,
CARGO_PKG_VERSION
));
}
}
Ok(Self {
rules: (&config).into(),
allowed_confusables: config
.allowed_confusables
.map(FxHashSet::from_iter)
.unwrap_or_default(),
builtins: config.builtins.unwrap_or_default(),
dummy_variable_rgx: config
.dummy_variable_rgx
.unwrap_or_else(|| defaults::DUMMY_VARIABLE_RGX.clone()),
exclude: FilePatternSet::try_from_vec(
config.exclude.unwrap_or_else(|| defaults::EXCLUDE.clone()),
)?,
extend_exclude: FilePatternSet::try_from_vec(config.extend_exclude)?,
extend_include: FilePatternSet::try_from_vec(config.extend_include)?,
external: FxHashSet::from_iter(config.external.unwrap_or_default()),
force_exclude: config.force_exclude.unwrap_or(false),
include: FilePatternSet::try_from_vec(
config.include.unwrap_or_else(|| defaults::INCLUDE.clone()),
)?,
ignore_init_module_imports: config.ignore_init_module_imports.unwrap_or_default(),
line_length: config.line_length.unwrap_or_default(),
tab_size: config.tab_size.unwrap_or_default(),
namespace_packages: config.namespace_packages.unwrap_or_default(),
per_file_ignores: resolve_per_file_ignores(
config
.per_file_ignores
.unwrap_or_default()
.into_iter()
.chain(config.extend_per_file_ignores)
.collect(),
)?,
respect_gitignore: config.respect_gitignore.unwrap_or(true),
src: config
.src
.unwrap_or_else(|| vec![project_root.to_path_buf()]),
project_root: project_root.to_path_buf(),
target_version: config.target_version.unwrap_or_default(),
task_tags: config.task_tags.unwrap_or_else(|| {
defaults::TASK_TAGS
.iter()
.map(ToString::to_string)
.collect()
}),
logger_objects: config.logger_objects.unwrap_or_default(),
typing_modules: config.typing_modules.unwrap_or_default(),
// Plugins
flake8_annotations: config
.flake8_annotations
.map(flake8_annotations::settings::Settings::from)
.unwrap_or_default(),
flake8_bandit: config
.flake8_bandit
.map(flake8_bandit::settings::Settings::from)
.unwrap_or_default(),
flake8_bugbear: config
.flake8_bugbear
.map(flake8_bugbear::settings::Settings::from)
.unwrap_or_default(),
flake8_builtins: config
.flake8_builtins
.map(flake8_builtins::settings::Settings::from)
.unwrap_or_default(),
flake8_comprehensions: config
.flake8_comprehensions
.map(flake8_comprehensions::settings::Settings::from)
.unwrap_or_default(),
flake8_copyright: config
.flake8_copyright
.map(flake8_copyright::settings::Settings::try_from)
.transpose()?
.unwrap_or_default(),
flake8_errmsg: config
.flake8_errmsg
.map(flake8_errmsg::settings::Settings::from)
.unwrap_or_default(),
flake8_implicit_str_concat: config
.flake8_implicit_str_concat
.map(flake8_implicit_str_concat::settings::Settings::from)
.unwrap_or_default(),
flake8_import_conventions: config
.flake8_import_conventions
.map(flake8_import_conventions::settings::Settings::from)
.unwrap_or_default(),
flake8_pytest_style: config
.flake8_pytest_style
.map(flake8_pytest_style::settings::Settings::try_from)
.transpose()?
.unwrap_or_default(),
flake8_quotes: config
.flake8_quotes
.map(flake8_quotes::settings::Settings::from)
.unwrap_or_default(),
flake8_self: config
.flake8_self
.map(flake8_self::settings::Settings::from)
.unwrap_or_default(),
flake8_tidy_imports: config
.flake8_tidy_imports
.map(flake8_tidy_imports::settings::Settings::from)
.unwrap_or_default(),
flake8_type_checking: config
.flake8_type_checking
.map(flake8_type_checking::settings::Settings::from)
.unwrap_or_default(),
flake8_unused_arguments: config
.flake8_unused_arguments
.map(flake8_unused_arguments::settings::Settings::from)
.unwrap_or_default(),
flake8_gettext: config
.flake8_gettext
.map(flake8_gettext::settings::Settings::from)
.unwrap_or_default(),
isort: config
.isort
.map(isort::settings::Settings::try_from)
.transpose()?
.unwrap_or_default(),
mccabe: config
.mccabe
.map(mccabe::settings::Settings::from)
.unwrap_or_default(),
pep8_naming: config
.pep8_naming
.map(pep8_naming::settings::Settings::try_from)
.transpose()?
.unwrap_or_default(),
pycodestyle: config
.pycodestyle
.map(pycodestyle::settings::Settings::from)
.unwrap_or_default(),
pydocstyle: config
.pydocstyle
.map(pydocstyle::settings::Settings::from)
.unwrap_or_default(),
pyflakes: config
.pyflakes
.map(pyflakes::settings::Settings::from)
.unwrap_or_default(),
pylint: config
.pylint
.map(pylint::settings::Settings::from)
.unwrap_or_default(),
pyupgrade: config
.pyupgrade
.map(pyupgrade::settings::Settings::from)
.unwrap_or_default(),
})
}
pub fn for_rule(rule_code: Rule) -> Self {
Self {
rules: RuleTable::from_iter([rule_code]),
@ -320,200 +130,6 @@ impl Settings {
}
}
impl From<&Configuration> for RuleTable {
fn from(config: &Configuration) -> Self {
// The select_set keeps track of which rules have been selected.
let mut select_set: RuleSet = defaults::PREFIXES.iter().flatten().collect();
// The fixable set keeps track of which rules are fixable.
let mut fixable_set: RuleSet = RuleSelector::All
.into_iter()
.chain(&RuleSelector::Nursery)
.collect();
// Ignores normally only subtract from the current set of selected
// rules. By that logic the ignore in `select = [], ignore = ["E501"]`
// would have no effect. Instead we carry over the ignores to the next
// selection in that case, creating a way for ignores to be reused
// across config files (which otherwise wouldn't be possible since ruff
// only has `extended` but no `extended-by`).
let mut carryover_ignores: Option<&[RuleSelector]> = None;
let mut carryover_unfixables: Option<&[RuleSelector]> = None;
let mut redirects = FxHashMap::default();
for selection in &config.rule_selections {
// If a selection only specifies extend-select we cannot directly
// apply its rule selectors to the select_set because we firstly have
// to resolve the effectively selected rules within the current rule selection
// (taking specificity into account since more specific selectors take
// precedence over less specific selectors within a rule selection).
// We do this via the following HashMap where the bool indicates
// whether to enable or disable the given rule.
let mut select_map_updates: FxHashMap<Rule, bool> = FxHashMap::default();
let mut fixable_map_updates: FxHashMap<Rule, bool> = FxHashMap::default();
let carriedover_ignores = carryover_ignores.take();
let carriedover_unfixables = carryover_unfixables.take();
for spec in Specificity::iter() {
// Iterate over rule selectors in order of specificity.
for selector in selection
.select
.iter()
.flatten()
.chain(selection.extend_select.iter())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
select_map_updates.insert(rule, true);
}
}
for selector in selection
.ignore
.iter()
.chain(carriedover_ignores.into_iter().flatten())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
select_map_updates.insert(rule, false);
}
}
// Apply the same logic to `fixable` and `unfixable`.
for selector in selection
.fixable
.iter()
.flatten()
.chain(selection.extend_fixable.iter())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
fixable_map_updates.insert(rule, true);
}
}
for selector in selection
.unfixable
.iter()
.chain(carriedover_unfixables.into_iter().flatten())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
fixable_map_updates.insert(rule, false);
}
}
}
if let Some(select) = &selection.select {
// If the `select` option is given we reassign the whole select_set
// (overriding everything that has been defined previously).
select_set = select_map_updates
.into_iter()
.filter_map(|(rule, enabled)| enabled.then_some(rule))
.collect();
if select.is_empty()
&& selection.extend_select.is_empty()
&& !selection.ignore.is_empty()
{
carryover_ignores = Some(&selection.ignore);
}
} else {
// Otherwise we apply the updates on top of the existing select_set.
for (rule, enabled) in select_map_updates {
if enabled {
select_set.insert(rule);
} else {
select_set.remove(rule);
}
}
}
// Apply the same logic to `fixable` and `unfixable`.
if let Some(fixable) = &selection.fixable {
fixable_set = fixable_map_updates
.into_iter()
.filter_map(|(rule, enabled)| enabled.then_some(rule))
.collect();
if fixable.is_empty()
&& selection.extend_fixable.is_empty()
&& !selection.unfixable.is_empty()
{
carryover_unfixables = Some(&selection.unfixable);
}
} else {
for (rule, enabled) in fixable_map_updates {
if enabled {
fixable_set.insert(rule);
} else {
fixable_set.remove(rule);
}
}
}
// We insert redirects into the hashmap so that we
// can warn the users about remapped rule codes.
for selector in selection
.select
.iter()
.chain(selection.fixable.iter())
.flatten()
.chain(selection.ignore.iter())
.chain(selection.extend_select.iter())
.chain(selection.unfixable.iter())
.chain(selection.extend_fixable.iter())
{
if let RuleSelector::Prefix {
prefix,
redirected_from: Some(redirect_from),
} = selector
{
redirects.insert(redirect_from, prefix);
}
}
}
for (from, target) in redirects {
// TODO(martin): This belongs into the ruff_cli crate.
warn_user_once_by_id!(
from,
"`{from}` has been remapped to `{}{}`.",
target.linter().common_prefix(),
target.short_code()
);
}
let mut rules = Self::empty();
for rule in select_set {
let fix = fixable_set.contains(rule);
rules.enable(rule, fix);
}
// If a docstring convention is specified, force-disable any incompatible error
// codes.
if let Some(convention) = config
.pydocstyle
.as_ref()
.and_then(|pydocstyle| pydocstyle.convention)
{
for rule in convention.rules_to_be_ignored() {
rules.disable(*rule);
}
}
// Validate that we didn't enable any incompatible rules. Use this awkward
// approach to give each pair its own `warn_user_once`.
for (preferred, expendable, message) in INCOMPATIBLE_CODES {
if rules.enabled(*preferred) && rules.enabled(*expendable) {
warn_user_once_by_id!(expendable.as_ref(), "{}", message);
rules.disable(*expendable);
}
}
rules
}
}
/// Given a list of patterns, create a `GlobSet`.
pub fn resolve_per_file_ignores(
per_file_ignores: Vec<PerFileIgnore>,
@ -532,157 +148,3 @@ pub fn resolve_per_file_ignores(
})
.collect()
}
#[cfg(test)]
mod tests {
use crate::codes::Pycodestyle;
use crate::registry::{Rule, RuleSet};
use crate::settings::configuration::Configuration;
use crate::settings::rule_table::RuleTable;
use super::configuration::RuleSelection;
#[allow(clippy::needless_pass_by_value)]
fn resolve_rules(selections: impl IntoIterator<Item = RuleSelection>) -> RuleSet {
RuleTable::from(&Configuration {
rule_selections: selections.into_iter().collect(),
..Configuration::default()
})
.iter_enabled()
.collect()
}
#[test]
fn rule_codes() {
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
..RuleSelection::default()
}]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::MissingNewlineAtEndOfFile,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W6.into()]),
..RuleSelection::default()
}]);
let expected = RuleSet::from_rule(Rule::InvalidEscapeSequence);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W292.into()]),
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::from_rule(Rule::MissingNewlineAtEndOfFile);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W605.into()]),
ignore: vec![Pycodestyle::W605.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::empty();
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::MissingNewlineAtEndOfFile,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![Pycodestyle::W292.into()],
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rule(Rule::MissingNewlineAtEndOfFile);
assert_eq!(actual, expected);
}
#[test]
fn carry_over_ignore() {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W505.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
}
}

View file

@ -1,611 +0,0 @@
//! Options that the user can provide via pyproject.toml.
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use ruff_macros::ConfigurationOptions;
use crate::line_width::{LineLength, TabSize};
use crate::rule_selector::RuleSelector;
use crate::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_comprehensions,
flake8_copyright, flake8_errmsg, flake8_gettext, flake8_implicit_str_concat,
flake8_import_conventions, flake8_pytest_style, flake8_quotes, flake8_self,
flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments, isort, mccabe, pep8_naming,
pycodestyle, pydocstyle, pyflakes, pylint, pyupgrade,
};
use crate::settings::types::{PythonVersion, SerializationFormat, Version};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct Options {
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
# Allow minus-sign (U+2212), greek-small-letter-rho (U+03C1), and the asterisk-operator (U+2217),
# which could be confused for "-", "p", and "*", respectively.
allowed-confusables = ["", "ρ", ""]
"#
)]
/// A list of allowed "confusable" Unicode characters to ignore when
/// enforcing `RUF001`, `RUF002`, and `RUF003`.
pub allowed_confusables: Option<Vec<char>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
builtins = ["_"]
"#
)]
/// A list of builtins to treat as defined references, in addition to the
/// system builtins.
pub builtins: Option<Vec<String>>,
#[option(
default = ".ruff_cache",
value_type = "str",
example = r#"cache-dir = "~/.cache/ruff""#
)]
/// A path to the cache directory.
///
/// By default, Ruff stores cache results in a `.ruff_cache` directory in
/// the current project root.
///
/// However, Ruff will also respect the `RUFF_CACHE_DIR` environment
/// variable, which takes precedence over that default.
///
/// This setting will override even the `RUFF_CACHE_DIR` environment
/// variable, if set.
pub cache_dir: Option<String>,
#[option(
default = r#""^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$""#,
value_type = "re.Pattern",
example = r#"
# Only ignore variables named "_".
dummy-variable-rgx = "^_$"
"#
)]
/// A regular expression used to identify "dummy" variables, or those which
/// should be ignored when enforcing (e.g.) unused-variable rules. The
/// default expression matches `_`, `__`, and `_var`, but not `_var_`.
pub dummy_variable_rgx: Option<String>,
#[option(
default = r#"[".bzr", ".direnv", ".eggs", ".git", ".git-rewrite", ".hg", ".mypy_cache", ".nox", ".pants.d", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", "venv"]"#,
value_type = "list[str]",
example = r#"
exclude = [".venv"]
"#
)]
/// A list of file patterns to exclude from linting.
///
/// Exclusions are based on globs, and can be either:
///
/// - Single-path patterns, like `.mypy_cache` (to exclude any directory
/// named `.mypy_cache` in the tree), `foo.py` (to exclude any file named
/// `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).
/// - Relative patterns, like `directory/foo.py` (to exclude that specific
/// file) or `directory/*.py` (to exclude any Python files in
/// `directory`). Note that these paths are relative to the project root
/// (e.g., the directory containing your `pyproject.toml`).
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
///
/// Note that you'll typically want to use
/// [`extend-exclude`](#extend-exclude) to modify the excluded paths.
pub exclude: Option<Vec<String>>,
#[option(
default = r#"None"#,
value_type = "str",
example = r#"
# Extend the `pyproject.toml` file in the parent directory.
extend = "../pyproject.toml"
# But use a different line length.
line-length = 100
"#
)]
/// A path to a local `pyproject.toml` file to merge into this
/// configuration. User home directory and environment variables will be
/// expanded.
///
/// To resolve the current `pyproject.toml` file, Ruff will first resolve
/// this base configuration file, then merge in any properties defined
/// in the current configuration file.
pub extend: Option<String>,
#[option(
default = "[]",
value_type = "list[str]",
example = r#"
# In addition to the standard set of exclusions, omit all tests, plus a specific file.
extend-exclude = ["tests", "src/bad.py"]
"#
)]
/// A list of file patterns to omit from linting, in addition to those
/// specified by `exclude`.
///
/// Exclusions are based on globs, and can be either:
///
/// - Single-path patterns, like `.mypy_cache` (to exclude any directory
/// named `.mypy_cache` in the tree), `foo.py` (to exclude any file named
/// `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).
/// - Relative patterns, like `directory/foo.py` (to exclude that specific
/// file) or `directory/*.py` (to exclude any Python files in
/// `directory`). Note that these paths are relative to the project root
/// (e.g., the directory containing your `pyproject.toml`).
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub extend_exclude: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[str]",
example = r#"
# In addition to the standard set of inclusions, include `.pyw` files.
extend-include = ["*.pyw"]
"#
)]
/// A list of file patterns to include when linting, in addition to those
/// specified by `include`.
///
/// Inclusions are based on globs, and should be single-path patterns, like
/// `*.pyw`, to include any file with the `.pyw` extension.
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub extend_include: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[RuleSelector]",
example = r#"
# Skip unused variable rules (`F841`).
extend-ignore = ["F841"]
"#
)]
/// A list of rule codes or prefixes to ignore, in addition to those
/// specified by `ignore`.
///
/// This option has been **deprecated** in favor of `ignore`
/// since its usage is now interchangeable with `ignore`.
#[cfg_attr(feature = "schemars", schemars(skip))]
pub extend_ignore: Option<Vec<RuleSelector>>,
#[option(
default = "[]",
value_type = "list[RuleSelector]",
example = r#"
# On top of the default `select` (`E`, `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).
extend-select = ["B", "Q"]
"#
)]
/// A list of rule codes or prefixes to enable, in addition to those
/// specified by `select`.
pub extend_select: Option<Vec<RuleSelector>>,
#[option(
default = r#"[]"#,
value_type = "list[RuleSelector]",
example = r#"
# Enable autofix for flake8-bugbear (`B`), on top of any rules specified by `fixable`.
extend-fixable = ["B"]
"#
)]
/// A list of rule codes or prefixes to consider autofixable, in addition to those
/// specified by `fixable`.
pub extend_fixable: Option<Vec<RuleSelector>>,
/// A list of rule codes or prefixes to consider non-auto-fixable, in addition to those
/// specified by `unfixable`.
///
/// This option has been **deprecated** in favor of `unfixable` since its usage is now
/// interchangeable with `unfixable`.
#[cfg_attr(feature = "schemars", schemars(skip))]
pub extend_unfixable: Option<Vec<RuleSelector>>,
#[option(
default = "[]",
value_type = "list[str]",
example = r#"
# Avoid flagging (and removing) `V101` from any `# noqa`
# directives, despite Ruff's lack of support for `vulture`.
external = ["V101"]
"#
)]
/// A list of rule codes that are unsupported by Ruff, but should be
/// preserved when (e.g.) validating `# noqa` directives. Useful for
/// retaining `# noqa` directives that cover plugins not yet implemented
/// by Ruff.
pub external: Option<Vec<String>>,
#[option(default = "false", value_type = "bool", example = "fix = true")]
/// Enable autofix behavior by-default when running `ruff` (overridden
/// by the `--fix` and `--no-fix` command-line flags).
pub fix: Option<bool>,
#[option(default = "false", value_type = "bool", example = "fix-only = true")]
/// Like `fix`, but disables reporting on leftover violations. Implies `fix`.
pub fix_only: Option<bool>,
#[option(
default = r#"["ALL"]"#,
value_type = "list[RuleSelector]",
example = r#"
# Only allow autofix behavior for `E` and `F` rules.
fixable = ["E", "F"]
"#
)]
/// A list of rule codes or prefixes to consider autofixable. By default,
/// all rules are considered autofixable.
pub fixable: Option<Vec<RuleSelector>>,
#[option(
default = r#""text""#,
value_type = r#""text" | "json" | "junit" | "github" | "gitlab" | "pylint" | "azure""#,
example = r#"
# Group violations by containing file.
format = "grouped"
"#
)]
/// The style in which violation messages should be formatted: `"text"`
/// (default), `"grouped"` (group messages by file), `"json"`
/// (machine-readable), `"junit"` (machine-readable XML), `"github"` (GitHub
/// Actions annotations), `"gitlab"` (GitLab CI code quality report),
/// `"pylint"` (Pylint text format) or `"azure"` (Azure Pipeline logging commands).
pub format: Option<SerializationFormat>,
#[option(
default = r#"false"#,
value_type = "bool",
example = r#"
force-exclude = true
"#
)]
/// Whether to enforce `exclude` and `extend-exclude` patterns, even for
/// paths that are passed to Ruff explicitly. Typically, Ruff will lint
/// any paths passed in directly, even if they would typically be
/// excluded. Setting `force-exclude = true` will cause Ruff to
/// respect these exclusions unequivocally.
///
/// This is useful for [`pre-commit`](https://pre-commit.com/), which explicitly passes all
/// changed files to the [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit)
/// plugin, regardless of whether they're marked as excluded by Ruff's own
/// settings.
pub force_exclude: Option<bool>,
#[option(
default = "[]",
value_type = "list[RuleSelector]",
example = r#"
# Skip unused variable rules (`F841`).
ignore = ["F841"]
"#
)]
/// A list of rule codes or prefixes to ignore. Prefixes can specify exact
/// rules (like `F841`), entire categories (like `F`), or anything in
/// between.
///
/// When breaking ties between enabled and disabled rules (via `select` and
/// `ignore`, respectively), more specific prefixes override less
/// specific prefixes.
pub ignore: Option<Vec<RuleSelector>>,
#[option(
default = "false",
value_type = "bool",
example = r#"
ignore-init-module-imports = true
"#
)]
/// Avoid automatically removing unused imports in `__init__.py` files. Such
/// imports will still be flagged, but with a dedicated message suggesting
/// that the import is either added to the module's `__all__` symbol, or
/// re-exported with a redundant alias (e.g., `import os as os`).
pub ignore_init_module_imports: Option<bool>,
#[option(
default = r#"["*.py", "*.pyi", "**/pyproject.toml"]"#,
value_type = "list[str]",
example = r#"
include = ["*.py"]
"#
)]
/// A list of file patterns to include when linting.
///
/// Inclusions are based on globs, and should be single-path patterns, like
/// `*.pyw`, to include any file with the `.pyw` extension. `pyproject.toml` is
/// included here not for configuration but because we lint whether e.g. the
/// `[project]` matches the schema.
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub include: Option<Vec<String>>,
#[option(
default = "88",
value_type = "int",
example = r#"
# Allow lines to be as long as 120 characters.
line-length = 120
"#
)]
/// The line length to use when enforcing long-lines violations (like
/// `E501`). Must be greater than `0`.
pub line_length: Option<LineLength>,
#[option(
default = "4",
value_type = "int",
example = r#"
tab-size = 8
"#
)]
/// The tab size to use when calculating line length.
pub tab_size: Option<TabSize>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"logger-objects = ["logging_setup.logger"]"#
)]
/// A list of objects that should be treated equivalently to a
/// `logging.Logger` object.
///
/// This is useful for ensuring proper diagnostics (e.g., to identify
/// `logging` deprecations and other best-practices) for projects that
/// re-export a `logging.Logger` object from a common module.
///
/// For example, if you have a module `logging_setup.py` with the following
/// contents:
/// ```python
/// import logging
///
/// logger = logging.getLogger(__name__)
/// ```
///
/// Adding `"logging_setup.logger"` to `logger-objects` will ensure that
/// `logging_setup.logger` is treated as a `logging.Logger` object when
/// imported from other modules (e.g., `from logging_setup import logger`).
pub logger_objects: Option<Vec<String>>,
#[option(
default = "None",
value_type = "str",
example = r#"
required-version = "0.0.193"
"#
)]
/// Require a specific version of Ruff to be running (useful for unifying
/// results across many environments, e.g., with a `pyproject.toml`
/// file).
pub required_version: Option<Version>,
#[option(
default = "true",
value_type = "bool",
example = r#"
respect-gitignore = false
"#
)]
/// Whether to automatically exclude files that are ignored by `.ignore`,
/// `.gitignore`, `.git/info/exclude`, and global `gitignore` files.
/// Enabled by default.
pub respect_gitignore: Option<bool>,
#[option(
default = r#"["E", "F"]"#,
value_type = "list[RuleSelector]",
example = r#"
# On top of the defaults (`E`, `F`), enable flake8-bugbear (`B`) and flake8-quotes (`Q`).
select = ["E", "F", "B", "Q"]
"#
)]
/// A list of rule codes or prefixes to enable. Prefixes can specify exact
/// rules (like `F841`), entire categories (like `F`), or anything in
/// between.
///
/// When breaking ties between enabled and disabled rules (via `select` and
/// `ignore`, respectively), more specific prefixes override less
/// specific prefixes.
pub select: Option<Vec<RuleSelector>>,
#[option(
default = "false",
value_type = "bool",
example = r#"
# By default, always show source code snippets.
show-source = true
"#
)]
/// Whether to show source code snippets when reporting lint violations
/// (overridden by the `--show-source` command-line flag).
pub show_source: Option<bool>,
#[option(
default = "false",
value_type = "bool",
example = r#"
# Enumerate all fixed violations.
show-fixes = true
"#
)]
/// Whether to show an enumeration of all autofixed lint violations
/// (overridden by the `--show-fixes` command-line flag).
pub show_fixes: Option<bool>,
#[option(
default = r#"["."]"#,
value_type = "list[str]",
example = r#"
# Allow imports relative to the "src" and "test" directories.
src = ["src", "test"]
"#
)]
/// The directories to consider when resolving first- vs. third-party
/// imports.
///
/// As an example: given a Python package structure like:
///
/// ```text
/// my_project
/// ├── pyproject.toml
/// └── src
/// └── my_package
/// ├── __init__.py
/// ├── foo.py
/// └── bar.py
/// ```
///
/// The `./src` directory should be included in the `src` option
/// (e.g., `src = ["src"]`), such that when resolving imports,
/// `my_package.foo` is considered a first-party import.
///
/// When omitted, the `src` directory will typically default to the
/// directory containing the nearest `pyproject.toml`, `ruff.toml`, or
/// `.ruff.toml` file (the "project root"), unless a configuration file
/// is explicitly provided (e.g., via the `--config` command-line flag).
///
/// This field supports globs. For example, if you have a series of Python
/// packages in a `python_modules` directory, `src = ["python_modules/*"]`
/// would expand to incorporate all of the packages in that directory. User
/// home directory and environment variables will also be expanded.
pub src: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"
namespace-packages = ["airflow/providers"]
"#
)]
/// Mark the specified directories as namespace packages. For the purpose of
/// module resolution, Ruff will treat those directories as if they
/// contained an `__init__.py` file.
pub namespace_packages: Option<Vec<String>>,
#[option(
default = r#""py38""#,
value_type = r#""py37" | "py38" | "py39" | "py310" | "py311" | "py312""#,
example = r#"
# Always generate Python 3.7-compatible code.
target-version = "py37"
"#
)]
/// The minimum Python version to target, e.g., when considering automatic
/// code upgrades, like rewriting type annotations. Ruff will not propose
/// changes using features that are not available in the given version.
///
/// For example, to represent supporting Python >=3.10 or ==3.10
/// specify `target-version = "py310"`.
///
/// If omitted, and Ruff is configured via a `pyproject.toml` file, the
/// target version will be inferred from its `project.requires-python`
/// field (e.g., `requires-python = ">=3.8"`). If Ruff is configured via
/// `ruff.toml` or `.ruff.toml`, no such inference will be performed.
pub target_version: Option<PythonVersion>,
#[option(
default = r#"["TODO", "FIXME", "XXX"]"#,
value_type = "list[str]",
example = r#"task-tags = ["HACK"]"#
)]
/// A list of task tags to recognize (e.g., "TODO", "FIXME", "XXX").
///
/// Comments starting with these tags will be ignored by commented-out code
/// detection (`ERA`), and skipped by line-length rules (`E501`) if
/// `ignore-overlong-task-comments` is set to `true`.
pub task_tags: Option<Vec<String>>,
#[option(
default = r#"[]"#,
value_type = "list[str]",
example = r#"typing-modules = ["airflow.typing_compat"]"#
)]
/// A list of modules whose exports should be treated equivalently to
/// members of the `typing` module.
///
/// This is useful for ensuring proper type annotation inference for
/// projects that re-export `typing` and `typing_extensions` members
/// from a compatibility module. If omitted, any members imported from
/// modules apart from `typing` and `typing_extensions` will be treated
/// as ordinary Python objects.
pub typing_modules: Option<Vec<String>>,
#[option(
default = "[]",
value_type = "list[RuleSelector]",
example = r#"
# Disable autofix for unused imports (`F401`).
unfixable = ["F401"]
"#
)]
/// A list of rule codes or prefixes to consider non-auto-fixable.
pub unfixable: Option<Vec<RuleSelector>>,
#[option_group]
/// Options for the `flake8-annotations` plugin.
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
#[option_group]
/// Options for the `flake8-bandit` plugin.
pub flake8_bandit: Option<flake8_bandit::settings::Options>,
#[option_group]
/// Options for the `flake8-bugbear` plugin.
pub flake8_bugbear: Option<flake8_bugbear::settings::Options>,
#[option_group]
/// Options for the `flake8-builtins` plugin.
pub flake8_builtins: Option<flake8_builtins::settings::Options>,
#[option_group]
/// Options for the `flake8-comprehensions` plugin.
pub flake8_comprehensions: Option<flake8_comprehensions::settings::Options>,
#[option_group]
/// Options for the `flake8-copyright` plugin.
pub flake8_copyright: Option<flake8_copyright::settings::Options>,
#[option_group]
/// Options for the `flake8-errmsg` plugin.
pub flake8_errmsg: Option<flake8_errmsg::settings::Options>,
#[option_group]
/// Options for the `flake8-quotes` plugin.
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
#[option_group]
/// Options for the `flake8_self` plugin.
pub flake8_self: Option<flake8_self::settings::Options>,
#[option_group]
/// Options for the `flake8-tidy-imports` plugin.
pub flake8_tidy_imports: Option<flake8_tidy_imports::options::Options>,
#[option_group]
/// Options for the `flake8-type-checking` plugin.
pub flake8_type_checking: Option<flake8_type_checking::settings::Options>,
#[option_group]
/// Options for the `flake8-gettext` plugin.
pub flake8_gettext: Option<flake8_gettext::settings::Options>,
#[option_group]
/// Options for the `flake8-implicit-str-concat` plugin.
pub flake8_implicit_str_concat: Option<flake8_implicit_str_concat::settings::Options>,
#[option_group]
/// Options for the `flake8-import-conventions` plugin.
pub flake8_import_conventions: Option<flake8_import_conventions::settings::Options>,
#[option_group]
/// Options for the `flake8-pytest-style` plugin.
pub flake8_pytest_style: Option<flake8_pytest_style::settings::Options>,
#[option_group]
/// Options for the `flake8-unused-arguments` plugin.
pub flake8_unused_arguments: Option<flake8_unused_arguments::settings::Options>,
#[option_group]
/// Options for the `isort` plugin.
pub isort: Option<isort::settings::Options>,
#[option_group]
/// Options for the `mccabe` plugin.
pub mccabe: Option<mccabe::settings::Options>,
#[option_group]
/// Options for the `pep8-naming` plugin.
pub pep8_naming: Option<pep8_naming::settings::Options>,
#[option_group]
/// Options for the `pycodestyle` plugin.
pub pycodestyle: Option<pycodestyle::settings::Options>,
#[option_group]
/// Options for the `pydocstyle` plugin.
pub pydocstyle: Option<pydocstyle::settings::Options>,
#[option_group]
/// Options for the `pyflakes` plugin.
pub pyflakes: Option<pyflakes::settings::Options>,
#[option_group]
/// Options for the `pylint` plugin.
pub pylint: Option<pylint::settings::Options>,
#[option_group]
/// Options for the `pyupgrade` plugin.
pub pyupgrade: Option<pyupgrade::settings::Options>,
// Tables are required to go last.
#[option(
default = "{}",
value_type = "dict[str, list[RuleSelector]]",
example = r#"
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
[tool.ruff.per-file-ignores]
"__init__.py" = ["E402"]
"path/to/file.py" = ["E402"]
"#
)]
/// A list of mappings from file pattern to rule codes or prefixes to
/// exclude, when considering any matching files.
pub per_file_ignores: Option<FxHashMap<String, Vec<RuleSelector>>>,
#[option(
default = "{}",
value_type = "dict[str, list[RuleSelector]]",
example = r#"
# Also ignore `E402` in all `__init__.py` files.
[tool.ruff.extend-per-file-ignores]
"__init__.py" = ["E402"]
"#
)]
/// A list of mappings from file pattern to rule codes or prefixes to
/// exclude, in addition to any rules excluded by `per-file-ignores`.
pub extend_per_file_ignores: Option<FxHashMap<String, Vec<RuleSelector>>>,
}
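Because `Options` derives `Deserialize` with kebab-case field names, the options surface (now owned by `ruff_workspace`) can be exercised directly with the `toml` crate. A hypothetical round-trip, assuming `ruff_workspace::options::Options`, `toml`, and `anyhow` are declared as dependencies:

```rust
use ruff_workspace::options::Options;

fn main() -> anyhow::Result<()> {
    // A bare `[tool.ruff]` table body, as it would appear in pyproject.toml.
    let options: Options = toml::from_str(
        r#"
line-length = 100
select = ["E", "F", "B"]
"#,
    )?;
    // `Options` derives `Debug`, so the parsed settings can be inspected directly.
    println!("{options:?}");
    Ok(())
}
```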

View file

@ -30,6 +30,7 @@ ruff_python_ast = { path = "../ruff_python_ast" }
ruff_python_formatter = { path = "../ruff_python_formatter" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace" }
ruff_text_size = { path = "../ruff_text_size" }
annotate-snippets = { version = "0.9.1", features = ["color"] }

View file

@ -8,12 +8,12 @@ use rustc_hash::FxHashMap;
use ruff::line_width::LineLength;
use ruff::logging::LogLevel;
use ruff::registry::Rule;
use ruff::resolver::ConfigProcessor;
use ruff::settings::configuration::RuleSelection;
use ruff::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
use ruff::RuleSelector;
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::resolver::ConfigProcessor;
#[derive(Debug, Parser)]
#[command(
@ -514,7 +514,7 @@ pub struct Overrides {
}
impl ConfigProcessor for Overrides {
fn process_config(&self, config: &mut ruff::settings::configuration::Configuration) {
fn process_config(&self, config: &mut Configuration) {
if let Some(cache_dir) = &self.cache_dir {
config.cache_dir = Some(cache_dir.clone());
}

View file

@ -7,9 +7,9 @@ use log::{debug, error};
use rayon::prelude::*;
use ruff::linter::add_noqa_to_path;
use ruff::resolver::PyprojectConfig;
use ruff::{packaging, resolver, warn_user_once};
use ruff::warn_user_once;
use ruff_python_stdlib::path::{is_jupyter_notebook, is_project_toml};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
use crate::args::Overrides;
@ -21,7 +21,7 @@ pub(crate) fn add_noqa(
) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
let (paths, resolver) = resolver::python_files_in_path(files, pyproject_config, overrides)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
@ -31,13 +31,12 @@ pub(crate) fn add_noqa(
}
// Discover the package root for each Python file.
let package_roots = packaging::detect_package_roots(
let package_roots = resolver.package_roots(
&paths
.iter()
.flatten()
.map(ignore::DirEntry::path)
.collect::<Vec<_>>(),
&resolver,
pyproject_config,
);

View file

@ -1,6 +1,5 @@
use ruff::settings::options::Options;
use crate::ExitStatus;
use ruff_workspace::options::Options;
#[allow(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>) -> ExitStatus {

View file

@ -9,11 +9,11 @@ use thiserror::Error;
use tracing::{span, Level};
use ruff::fs;
use ruff::resolver::python_files_in_path;
use ruff::warn_user_once;
use ruff_formatter::LineWidth;
use ruff_python_ast::PySourceType;
use ruff_python_formatter::{format_module, FormatModuleError, PyFormatOptions};
use ruff_workspace::resolver::python_files_in_path;
use crate::args::{Arguments, Overrides};
use crate::resolve::resolve;

View file

@ -11,16 +11,16 @@ use itertools::Itertools;
use log::{debug, error, warn};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use ruff_text_size::{TextRange, TextSize};
use ruff::message::Message;
use ruff::registry::Rule;
use ruff::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
use ruff::settings::{flags, AllSettings};
use ruff::{fs, packaging, resolver, warn_user_once, IOError};
use ruff::{fs, warn_user_once, IOError};
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::imports::ImportMap;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig, PyprojectDiscoveryStrategy};
use crate::args::Overrides;
use crate::cache::{self, Cache};
@ -38,7 +38,7 @@ pub(crate) fn run(
) -> Result<Diagnostics> {
// Collect all the Python files to check.
let start = Instant::now();
let (paths, resolver) = resolver::python_files_in_path(files, pyproject_config, overrides)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
@ -60,7 +60,9 @@ pub(crate) fn run(
init_cache(&pyproject_config.settings.cli.cache_dir);
}
PyprojectDiscoveryStrategy::Hierarchical => {
for settings in std::iter::once(&pyproject_config.settings).chain(resolver.iter()) {
for settings in
std::iter::once(&pyproject_config.settings).chain(resolver.settings())
{
init_cache(&settings.cli.cache_dir);
}
}
@ -68,13 +70,12 @@ pub(crate) fn run(
};
// Discover the package root for each Python file.
let package_roots = packaging::detect_package_roots(
let package_roots = resolver.package_roots(
&paths
.iter()
.flatten()
.map(ignore::DirEntry::path)
.collect::<Vec<_>>(),
&resolver,
pyproject_config,
);
@ -230,18 +231,22 @@ with the relevant file contents, the `pyproject.toml` settings, and the followin
#[cfg(test)]
#[cfg(unix)]
mod test {
use super::run;
use crate::args::Overrides;
use anyhow::Result;
use ruff::message::{Emitter, EmitterContext, TextEmitter};
use ruff::registry::Rule;
use ruff::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
use ruff::settings::{flags, AllSettings, CliSettings, Settings};
use rustc_hash::FxHashMap;
use std::fs;
use std::os::unix::fs::OpenOptionsExt;
use anyhow::Result;
use rustc_hash::FxHashMap;
use tempfile::TempDir;
use ruff::message::{Emitter, EmitterContext, TextEmitter};
use ruff::registry::Rule;
use ruff::settings::{flags, AllSettings, CliSettings, Settings};
use ruff_workspace::resolver::{PyprojectConfig, PyprojectDiscoveryStrategy};
use crate::args::Overrides;
use super::run;
/// We check that regular Python files, pyproject.toml files, and Jupyter notebooks all handle
/// IO errors gracefully.
#[test]

View file

@ -3,9 +3,9 @@ use std::path::Path;
use anyhow::Result;
use ruff::resolver::PyprojectConfig;
use ruff::packaging;
use ruff::settings::flags;
use ruff::{packaging, resolver};
use ruff_workspace::resolver::{python_file_at_path, PyprojectConfig};
use crate::args::Overrides;
use crate::diagnostics::{lint_stdin, Diagnostics};
@ -26,7 +26,7 @@ pub(crate) fn run_stdin(
autofix: flags::FixMode,
) -> Result<Diagnostics> {
if let Some(filename) = filename {
if !resolver::python_file_at_path(filename, pyproject_config, overrides)? {
if !python_file_at_path(filename, pyproject_config, overrides)? {
return Ok(Diagnostics::default());
}
}

View file

@ -4,8 +4,8 @@ use std::path::PathBuf;
use anyhow::Result;
use itertools::Itertools;
use ruff::resolver::PyprojectConfig;
use ruff::{resolver, warn_user_once};
use ruff::warn_user_once;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
use crate::args::Overrides;
@ -17,7 +17,7 @@ pub(crate) fn show_files(
writer: &mut impl Write,
) -> Result<()> {
// Collect all files in the hierarchy.
let (paths, _resolver) = resolver::python_files_in_path(files, pyproject_config, overrides)?;
let (paths, _resolver) = python_files_in_path(files, pyproject_config, overrides)?;
if paths.is_empty() {
warn_user_once!("No Python files found under the given path(s)");

View file

@ -4,8 +4,7 @@ use std::path::PathBuf;
use anyhow::{bail, Result};
use itertools::Itertools;
use ruff::resolver;
use ruff::resolver::PyprojectConfig;
use ruff_workspace::resolver::{python_files_in_path, PyprojectConfig};
use crate::args::Overrides;
@ -17,7 +16,7 @@ pub(crate) fn show_settings(
writer: &mut impl Write,
) -> Result<()> {
// Collect all files in the hierarchy.
let (paths, resolver) = resolver::python_files_in_path(files, pyproject_config, overrides)?;
let (paths, resolver) = python_files_in_path(files, pyproject_config, overrides)?;
// Print the list of files.
let Some(entry) = paths

View file

@ -4,12 +4,12 @@ use anyhow::Result;
use log::debug;
use path_absolutize::path_dedot;
use ruff::resolver::{
use ruff_workspace::configuration::Configuration;
use ruff_workspace::pyproject;
use ruff_workspace::resolver::{
resolve_settings_with_processor, ConfigProcessor, PyprojectConfig, PyprojectDiscoveryStrategy,
Relativity,
};
use ruff::settings::configuration::Configuration;
use ruff::settings::{pyproject, AllSettings};
use crate::args::Overrides;
@ -25,7 +25,7 @@ pub fn resolve(
if isolated {
let mut config = Configuration::default();
overrides.process_config(&mut config);
let settings = AllSettings::from_configuration(config, &path_dedot::CWD)?;
let settings = config.into_all_settings(&path_dedot::CWD)?;
debug!("Isolated mode, not reading any pyproject.toml");
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
@ -42,7 +42,7 @@ pub fn resolve(
.map(|config| shellexpand::full(&config).map(|config| PathBuf::from(config.as_ref())))
.transpose()?
{
let settings = resolve_settings_with_processor(&pyproject, &Relativity::Cwd, overrides)?;
let settings = resolve_settings_with_processor(&pyproject, Relativity::Cwd, overrides)?;
debug!(
"Using user specified pyproject.toml at {}",
pyproject.display()
@ -65,7 +65,7 @@ pub fn resolve(
.unwrap_or(&path_dedot::CWD.as_path()),
)? {
debug!("Using pyproject.toml (parent) at {}", pyproject.display());
let settings = resolve_settings_with_processor(&pyproject, &Relativity::Parent, overrides)?;
let settings = resolve_settings_with_processor(&pyproject, Relativity::Parent, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@ -79,7 +79,7 @@ pub fn resolve(
// these act as the "default" settings.)
if let Some(pyproject) = pyproject::find_user_settings_toml() {
debug!("Using pyproject.toml (cwd) at {}", pyproject.display());
let settings = resolve_settings_with_processor(&pyproject, &Relativity::Cwd, overrides)?;
let settings = resolve_settings_with_processor(&pyproject, Relativity::Cwd, overrides)?;
return Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,
@ -94,7 +94,7 @@ pub fn resolve(
debug!("Using Ruff default settings");
let mut config = Configuration::default();
overrides.process_config(&mut config);
let settings = AllSettings::from_configuration(config, &path_dedot::CWD)?;
let settings = config.into_all_settings(&path_dedot::CWD)?;
Ok(PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
settings,

View file

@ -22,6 +22,7 @@ ruff_python_literal = { path = "../ruff_python_literal" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_python_stdlib = { path = "../ruff_python_stdlib" }
ruff_python_trivia = { path = "../ruff_python_trivia" }
ruff_workspace = { path = "../ruff_workspace", features = ["schemars"]}
anyhow = { workspace = true }
clap = { workspace = true }

View file

@ -8,7 +8,6 @@ use indicatif::ProgressStyle;
#[cfg_attr(feature = "singlethreaded", allow(unused_imports))]
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use ruff::logging::LogLevel;
use ruff::resolver::python_files_in_path;
use ruff::settings::types::{FilePattern, FilePatternSet};
use ruff_cli::args::{CheckArgs, LogLevelArgs};
use ruff_cli::resolve::resolve;
@ -16,6 +15,7 @@ use ruff_formatter::{FormatError, LineWidth, PrintError};
use ruff_python_formatter::{
format_module, FormatModuleError, MagicTrailingComma, PyFormatOptions,
};
use ruff_workspace::resolver::python_files_in_path;
use serde::Deserialize;
use similar::{ChangeTag, TextDiff};
use std::fmt::{Display, Formatter};

View file

@ -9,8 +9,8 @@ use regex::{Captures, Regex};
use strum::IntoEnumIterator;
use ruff::registry::{Linter, Rule, RuleNamespace};
use ruff::settings::options::Options;
use ruff_diagnostics::AutofixKind;
use ruff_workspace::options::Options;
use crate::ROOT_DIR;

View file

@ -7,10 +7,9 @@ use anyhow::{bail, Result};
use pretty_assertions::StrComparison;
use schemars::schema_for;
use ruff::settings::options::Options;
use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND};
use crate::ROOT_DIR;
use ruff_workspace::options::Options;
#[derive(clap::Args)]
pub(crate) struct Args {

View file

@ -2,9 +2,8 @@
//!
//! Used for <https://beta.ruff.rs/docs/settings/>.
use itertools::Itertools;
use ruff::settings::options::Options;
use ruff::settings::options_base::{OptionEntry, OptionField};
use ruff_workspace::options::Options;
use ruff_workspace::options_base::{OptionEntry, OptionField};
fn emit_field(output: &mut String, name: &str, field: &OptionField, group_name: Option<&str>) {
// if there's a group name, we need to add it to the anchor

View file

@ -6,9 +6,9 @@ use itertools::Itertools;
use strum::IntoEnumIterator;
use ruff::registry::{Linter, Rule, RuleNamespace};
use ruff::settings::options::Options;
use ruff::upstream_categories::UpstreamCategoryAndPrefix;
use ruff_diagnostics::AutofixKind;
use ruff_workspace::options::Options;
const FIX_SYMBOL: &str = "🛠";
const NURSERY_SYMBOL: &str = "🌅";

View file

@ -16,9 +16,7 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenS
.collect::<Result<Vec<_>, _>>()?;
Ok(quote! {
use crate::settings::configuration::CombinePluginOptions;
impl CombinePluginOptions for #ident {
impl crate::configuration::CombinePluginOptions for #ident {
fn combine(self, other: Self) -> Self {
Self {
#(

View file

@ -53,12 +53,11 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenS
let options_len = output.len();
Ok(quote! {
use crate::settings::options_base::{OptionEntry, OptionField, OptionGroup};
impl #ident {
pub const fn metadata() -> OptionGroup {
const OPTIONS: [(&'static str, OptionEntry); #options_len] = [#(#output),*];
OptionGroup::new(&OPTIONS)
pub const fn metadata() -> crate::options_base::OptionGroup {
const OPTIONS: [(&'static str, crate::options_base::OptionEntry); #options_len] = [#(#output),*];
crate::options_base::OptionGroup::new(&OPTIONS)
}
}
})
@ -93,7 +92,7 @@ fn handle_option_group(field: &Field) -> syn::Result<proc_macro2::TokenStream> {
let kebab_name = LitStr::new(&ident.to_string().replace('_', "-"), ident.span());
Ok(quote_spanned!(
ident.span() => (#kebab_name, OptionEntry::Group(#path::metadata()))
ident.span() => (#kebab_name, crate::options_base::OptionEntry::Group(#path::metadata()))
))
}
_ => Err(syn::Error::new(
@ -151,7 +150,7 @@ fn handle_option(
let kebab_name = LitStr::new(&ident.to_string().replace('_', "-"), ident.span());
Ok(quote_spanned!(
ident.span() => (#kebab_name, OptionEntry::Field(OptionField {
ident.span() => (#kebab_name, crate::options_base::OptionEntry::Field(crate::options_base::OptionField {
doc: &#doc,
default: &#default,
value_type: &#value_type,

View file

@ -27,6 +27,7 @@ ruff_python_index = { path = "../ruff_python_index" }
ruff_python_parser = { path = "../ruff_python_parser" }
ruff_source_file = { path = "../ruff_source_file" }
ruff_text_size = { path = "../ruff_text_size" }
ruff_workspace = { path = "../ruff_workspace" }
console_error_panic_hook = { version = "0.1.7", optional = true }
console_log = { version = "1.0.0" }

View file

@ -8,15 +8,6 @@ use ruff::directives;
use ruff::line_width::{LineLength, TabSize};
use ruff::linter::{check_path, LinterResult};
use ruff::registry::AsRule;
use ruff::rules::{
flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins, flake8_comprehensions,
flake8_copyright, flake8_errmsg, flake8_gettext, flake8_implicit_str_concat,
flake8_import_conventions, flake8_pytest_style, flake8_quotes, flake8_self,
flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments, isort, mccabe, pep8_naming,
pycodestyle, pydocstyle, pyflakes, pylint, pyupgrade,
};
use ruff::settings::configuration::Configuration;
use ruff::settings::options::Options;
use ruff::settings::types::PythonVersion;
use ruff::settings::{defaults, flags, Settings};
use ruff_python_ast::PySourceType;
@ -28,6 +19,8 @@ use ruff_python_parser::AsMode;
use ruff_python_parser::{parse_tokens, Mode};
use ruff_source_file::{Locator, SourceLocation};
use ruff_text_size::Ranged;
use ruff_workspace::configuration::Configuration;
use ruff_workspace::options::Options;
#[wasm_bindgen(typescript_custom_section)]
const TYPES: &'static str = r#"
@ -113,8 +106,9 @@ impl Workspace {
let options: Options = serde_wasm_bindgen::from_value(options).map_err(into_error)?;
let configuration =
Configuration::from_options(options, Path::new(".")).map_err(into_error)?;
let settings =
Settings::from_configuration(configuration, Path::new(".")).map_err(into_error)?;
let settings = configuration
.into_settings(Path::new("."))
.map_err(into_error)?;
Ok(Workspace { settings })
}
@ -161,39 +155,7 @@ impl Workspace {
task_tags: None,
typing_modules: None,
unfixable: None,
// Use default options for all plugins.
flake8_annotations: Some(flake8_annotations::settings::Settings::default().into()),
flake8_bandit: Some(flake8_bandit::settings::Settings::default().into()),
flake8_bugbear: Some(flake8_bugbear::settings::Settings::default().into()),
flake8_builtins: Some(flake8_builtins::settings::Settings::default().into()),
flake8_comprehensions: Some(
flake8_comprehensions::settings::Settings::default().into(),
),
flake8_copyright: Some(flake8_copyright::settings::Settings::default().into()),
flake8_errmsg: Some(flake8_errmsg::settings::Settings::default().into()),
flake8_gettext: Some(flake8_gettext::settings::Settings::default().into()),
flake8_implicit_str_concat: Some(
flake8_implicit_str_concat::settings::Settings::default().into(),
),
flake8_import_conventions: Some(
flake8_import_conventions::settings::Settings::default().into(),
),
flake8_pytest_style: Some(flake8_pytest_style::settings::Settings::default().into()),
flake8_quotes: Some(flake8_quotes::settings::Settings::default().into()),
flake8_self: Some(flake8_self::settings::Settings::default().into()),
flake8_tidy_imports: Some(flake8_tidy_imports::settings::Settings::default().into()),
flake8_type_checking: Some(flake8_type_checking::settings::Settings::default().into()),
flake8_unused_arguments: Some(
flake8_unused_arguments::settings::Settings::default().into(),
),
isort: Some(isort::settings::Settings::default().into()),
mccabe: Some(mccabe::settings::Settings::default().into()),
pep8_naming: Some(pep8_naming::settings::Settings::default().into()),
pycodestyle: Some(pycodestyle::settings::Settings::default().into()),
pydocstyle: Some(pydocstyle::settings::Settings::default().into()),
pyflakes: Some(pyflakes::settings::Settings::default().into()),
pylint: Some(pylint::settings::Settings::default().into()),
pyupgrade: Some(pyupgrade::settings::Settings::default().into()),
..Options::default()
})
.map_err(into_error)
}

View file

@ -0,0 +1,43 @@
[package]
name = "ruff_workspace"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
[lib]
[dependencies]
ruff = { path = "../ruff" }
ruff_cache = { path = "../ruff_cache" }
ruff_macros = { path = "../ruff_macros" }
anyhow = { workspace = true }
colored = { workspace = true }
dirs = { version = "5.0.0" }
ignore = { workspace = true }
itertools = { workspace = true }
log = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }
path-absolutize = { workspace = true }
pep440_rs = { version = "0.3.1", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true}
shellexpand = { workspace = true }
strum = { workspace = true }
toml = { workspace = true }
[dev-dependencies]
tempfile = "3.6.0"
[features]
schemars = [ "dep:schemars" ]

View file

@ -0,0 +1,906 @@
//! User-provided program settings, taking into account pyproject.toml and
//! command-line options. Structure mirrors the user-facing representation of
//! the various parameters.
use std::borrow::Cow;
use std::env::VarError;
use std::path::{Path, PathBuf};
use crate::options::{
Flake8AnnotationsOptions, Flake8BanditOptions, Flake8BugbearOptions, Flake8BuiltinsOptions,
Flake8ComprehensionsOptions, Flake8CopyrightOptions, Flake8ErrMsgOptions, Flake8GetTextOptions,
Flake8ImplicitStrConcatOptions, Flake8ImportConventionsOptions, Flake8PytestStyleOptions,
Flake8QuotesOptions, Flake8SelfOptions, Flake8TidyImportsOptions, Flake8TypeCheckingOptions,
Flake8UnusedArgumentsOptions, IsortOptions, McCabeOptions, Options, Pep8NamingOptions,
PyUpgradeOptions, PycodestyleOptions, PydocstyleOptions, PyflakesOptions, PylintOptions,
};
use anyhow::{anyhow, Result};
use glob::{glob, GlobError, Paths, PatternError};
use regex::Regex;
use ruff::line_width::{LineLength, TabSize};
use ruff::registry::RuleNamespace;
use ruff::registry::{Rule, RuleSet, INCOMPATIBLE_CODES};
use ruff::rule_selector::Specificity;
use ruff::settings::rule_table::RuleTable;
use ruff::settings::types::{
FilePattern, FilePatternSet, PerFileIgnore, PythonVersion, SerializationFormat, Version,
};
use ruff::settings::{defaults, resolve_per_file_ignores, AllSettings, CliSettings, Settings};
use ruff::{fs, warn_user_once_by_id, RuleSelector, RUFF_PKG_VERSION};
use ruff_cache::cache_dir;
use rustc_hash::{FxHashMap, FxHashSet};
use shellexpand;
use shellexpand::LookupError;
use strum::IntoEnumIterator;
#[derive(Debug, Default)]
pub struct RuleSelection {
pub select: Option<Vec<RuleSelector>>,
pub ignore: Vec<RuleSelector>,
pub extend_select: Vec<RuleSelector>,
pub fixable: Option<Vec<RuleSelector>>,
pub unfixable: Vec<RuleSelector>,
pub extend_fixable: Vec<RuleSelector>,
}
#[derive(Debug, Default)]
pub struct Configuration {
pub rule_selections: Vec<RuleSelection>,
pub per_file_ignores: Option<Vec<PerFileIgnore>>,
pub allowed_confusables: Option<Vec<char>>,
pub builtins: Option<Vec<String>>,
pub cache_dir: Option<PathBuf>,
pub dummy_variable_rgx: Option<Regex>,
pub exclude: Option<Vec<FilePattern>>,
pub extend: Option<PathBuf>,
pub extend_exclude: Vec<FilePattern>,
pub extend_include: Vec<FilePattern>,
pub extend_per_file_ignores: Vec<PerFileIgnore>,
pub external: Option<Vec<String>>,
pub fix: Option<bool>,
pub fix_only: Option<bool>,
pub force_exclude: Option<bool>,
pub format: Option<SerializationFormat>,
pub ignore_init_module_imports: Option<bool>,
pub include: Option<Vec<FilePattern>>,
pub line_length: Option<LineLength>,
pub logger_objects: Option<Vec<String>>,
pub namespace_packages: Option<Vec<PathBuf>>,
pub required_version: Option<Version>,
pub respect_gitignore: Option<bool>,
pub show_fixes: Option<bool>,
pub show_source: Option<bool>,
pub src: Option<Vec<PathBuf>>,
pub tab_size: Option<TabSize>,
pub target_version: Option<PythonVersion>,
pub task_tags: Option<Vec<String>>,
pub typing_modules: Option<Vec<String>>,
// Plugins
pub flake8_annotations: Option<Flake8AnnotationsOptions>,
pub flake8_bandit: Option<Flake8BanditOptions>,
pub flake8_bugbear: Option<Flake8BugbearOptions>,
pub flake8_builtins: Option<Flake8BuiltinsOptions>,
pub flake8_comprehensions: Option<Flake8ComprehensionsOptions>,
pub flake8_copyright: Option<Flake8CopyrightOptions>,
pub flake8_errmsg: Option<Flake8ErrMsgOptions>,
pub flake8_gettext: Option<Flake8GetTextOptions>,
pub flake8_implicit_str_concat: Option<Flake8ImplicitStrConcatOptions>,
pub flake8_import_conventions: Option<Flake8ImportConventionsOptions>,
pub flake8_pytest_style: Option<Flake8PytestStyleOptions>,
pub flake8_quotes: Option<Flake8QuotesOptions>,
pub flake8_self: Option<Flake8SelfOptions>,
pub flake8_tidy_imports: Option<Flake8TidyImportsOptions>,
pub flake8_type_checking: Option<Flake8TypeCheckingOptions>,
pub flake8_unused_arguments: Option<Flake8UnusedArgumentsOptions>,
pub isort: Option<IsortOptions>,
pub mccabe: Option<McCabeOptions>,
pub pep8_naming: Option<Pep8NamingOptions>,
pub pycodestyle: Option<PycodestyleOptions>,
pub pydocstyle: Option<PydocstyleOptions>,
pub pyflakes: Option<PyflakesOptions>,
pub pylint: Option<PylintOptions>,
pub pyupgrade: Option<PyUpgradeOptions>,
}
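// A minimal sketch of building a `Configuration` by hand (for example from
// CLI-style overrides): every field is optional, so struct-update syntax with
// the derived `Default` suffices. The concrete values here are arbitrary examples.
fn example_configuration() -> Configuration {
    Configuration {
        rule_selections: vec![RuleSelection {
            select: Some(vec![RuleSelector::All]),
            ..RuleSelection::default()
        }],
        fix: Some(true),
        respect_gitignore: Some(false),
        src: Some(vec![PathBuf::from("src")]),
        ..Configuration::default()
    }
}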
impl Configuration {
pub fn into_all_settings(self, project_root: &Path) -> Result<AllSettings> {
Ok(AllSettings {
cli: CliSettings {
cache_dir: self
.cache_dir
.clone()
.unwrap_or_else(|| cache_dir(project_root)),
fix: self.fix.unwrap_or(false),
fix_only: self.fix_only.unwrap_or(false),
format: self.format.unwrap_or_default(),
show_fixes: self.show_fixes.unwrap_or(false),
show_source: self.show_source.unwrap_or(false),
},
lib: self.into_settings(project_root)?,
})
}
pub fn into_settings(self, project_root: &Path) -> Result<Settings> {
if let Some(required_version) = &self.required_version {
if &**required_version != RUFF_PKG_VERSION {
return Err(anyhow!(
"Required version `{}` does not match the running version `{}`",
&**required_version,
RUFF_PKG_VERSION
));
}
}
Ok(Settings {
rules: self.as_rule_table(),
allowed_confusables: self
.allowed_confusables
.map(FxHashSet::from_iter)
.unwrap_or_default(),
builtins: self.builtins.unwrap_or_default(),
dummy_variable_rgx: self
.dummy_variable_rgx
.unwrap_or_else(|| defaults::DUMMY_VARIABLE_RGX.clone()),
exclude: FilePatternSet::try_from_vec(
self.exclude.unwrap_or_else(|| defaults::EXCLUDE.clone()),
)?,
extend_exclude: FilePatternSet::try_from_vec(self.extend_exclude)?,
extend_include: FilePatternSet::try_from_vec(self.extend_include)?,
external: FxHashSet::from_iter(self.external.unwrap_or_default()),
force_exclude: self.force_exclude.unwrap_or(false),
include: FilePatternSet::try_from_vec(
self.include.unwrap_or_else(|| defaults::INCLUDE.clone()),
)?,
ignore_init_module_imports: self.ignore_init_module_imports.unwrap_or_default(),
line_length: self.line_length.unwrap_or_default(),
tab_size: self.tab_size.unwrap_or_default(),
namespace_packages: self.namespace_packages.unwrap_or_default(),
per_file_ignores: resolve_per_file_ignores(
self.per_file_ignores
.unwrap_or_default()
.into_iter()
.chain(self.extend_per_file_ignores)
.collect(),
)?,
respect_gitignore: self.respect_gitignore.unwrap_or(true),
src: self.src.unwrap_or_else(|| vec![project_root.to_path_buf()]),
project_root: project_root.to_path_buf(),
target_version: self.target_version.unwrap_or_default(),
task_tags: self.task_tags.unwrap_or_else(|| {
defaults::TASK_TAGS
.iter()
.map(ToString::to_string)
.collect()
}),
logger_objects: self.logger_objects.unwrap_or_default(),
typing_modules: self.typing_modules.unwrap_or_default(),
// Plugins
flake8_annotations: self
.flake8_annotations
.map(Flake8AnnotationsOptions::into_settings)
.unwrap_or_default(),
flake8_bandit: self
.flake8_bandit
.map(Flake8BanditOptions::into_settings)
.unwrap_or_default(),
flake8_bugbear: self
.flake8_bugbear
.map(Flake8BugbearOptions::into_settings)
.unwrap_or_default(),
flake8_builtins: self
.flake8_builtins
.map(Flake8BuiltinsOptions::into_settings)
.unwrap_or_default(),
flake8_comprehensions: self
.flake8_comprehensions
.map(Flake8ComprehensionsOptions::into_settings)
.unwrap_or_default(),
flake8_copyright: self
.flake8_copyright
.map(Flake8CopyrightOptions::try_into_settings)
.transpose()?
.unwrap_or_default(),
flake8_errmsg: self
.flake8_errmsg
.map(Flake8ErrMsgOptions::into_settings)
.unwrap_or_default(),
flake8_implicit_str_concat: self
.flake8_implicit_str_concat
.map(Flake8ImplicitStrConcatOptions::into_settings)
.unwrap_or_default(),
flake8_import_conventions: self
.flake8_import_conventions
.map(Flake8ImportConventionsOptions::into_settings)
.unwrap_or_default(),
flake8_pytest_style: self
.flake8_pytest_style
.map(Flake8PytestStyleOptions::try_into_settings)
.transpose()?
.unwrap_or_default(),
flake8_quotes: self
.flake8_quotes
.map(Flake8QuotesOptions::into_settings)
.unwrap_or_default(),
flake8_self: self
.flake8_self
.map(Flake8SelfOptions::into_settings)
.unwrap_or_default(),
flake8_tidy_imports: self
.flake8_tidy_imports
.map(Flake8TidyImportsOptions::into_settings)
.unwrap_or_default(),
flake8_type_checking: self
.flake8_type_checking
.map(Flake8TypeCheckingOptions::into_settings)
.unwrap_or_default(),
flake8_unused_arguments: self
.flake8_unused_arguments
.map(Flake8UnusedArgumentsOptions::into_settings)
.unwrap_or_default(),
flake8_gettext: self
.flake8_gettext
.map(Flake8GetTextOptions::into_settings)
.unwrap_or_default(),
isort: self
.isort
.map(IsortOptions::try_into_settings)
.transpose()?
.unwrap_or_default(),
mccabe: self
.mccabe
.map(McCabeOptions::into_settings)
.unwrap_or_default(),
pep8_naming: self
.pep8_naming
.map(Pep8NamingOptions::try_into_settings)
.transpose()?
.unwrap_or_default(),
pycodestyle: self
.pycodestyle
.map(PycodestyleOptions::into_settings)
.unwrap_or_default(),
pydocstyle: self
.pydocstyle
.map(PydocstyleOptions::into_settings)
.unwrap_or_default(),
pyflakes: self
.pyflakes
.map(PyflakesOptions::into_settings)
.unwrap_or_default(),
pylint: self
.pylint
.map(PylintOptions::into_settings)
.unwrap_or_default(),
pyupgrade: self
.pyupgrade
.map(PyUpgradeOptions::into_settings)
.unwrap_or_default(),
})
}
pub fn from_options(options: Options, project_root: &Path) -> Result<Self> {
Ok(Self {
rule_selections: vec![RuleSelection {
select: options.select,
ignore: options
.ignore
.into_iter()
.flatten()
.chain(options.extend_ignore.into_iter().flatten())
.collect(),
extend_select: options.extend_select.unwrap_or_default(),
fixable: options.fixable,
unfixable: options
.unfixable
.into_iter()
.flatten()
.chain(options.extend_unfixable.into_iter().flatten())
.collect(),
extend_fixable: options.extend_fixable.unwrap_or_default(),
}],
allowed_confusables: options.allowed_confusables,
builtins: options.builtins,
cache_dir: options
.cache_dir
.map(|dir| {
let dir = shellexpand::full(&dir);
dir.map(|dir| PathBuf::from(dir.as_ref()))
})
.transpose()
.map_err(|e| anyhow!("Invalid `cache-dir` value: {e}"))?,
dummy_variable_rgx: options
.dummy_variable_rgx
.map(|pattern| Regex::new(&pattern))
.transpose()
.map_err(|e| anyhow!("Invalid `dummy-variable-rgx` value: {e}"))?,
exclude: options.exclude.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
}),
extend: options
.extend
.map(|extend| {
let extend = shellexpand::full(&extend);
extend.map(|extend| PathBuf::from(extend.as_ref()))
})
.transpose()
.map_err(|e| anyhow!("Invalid `extend` value: {e}"))?,
extend_exclude: options
.extend_exclude
.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
})
.unwrap_or_default(),
extend_include: options
.extend_include
.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
})
.unwrap_or_default(),
extend_per_file_ignores: options
.extend_per_file_ignores
.map(|per_file_ignores| {
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| {
PerFileIgnore::new(pattern, &prefixes, Some(project_root))
})
.collect()
})
.unwrap_or_default(),
external: options.external,
fix: options.fix,
fix_only: options.fix_only,
format: options.format,
force_exclude: options.force_exclude,
ignore_init_module_imports: options.ignore_init_module_imports,
include: options.include.map(|paths| {
paths
.into_iter()
.map(|pattern| {
let absolute = fs::normalize_path_to(&pattern, project_root);
FilePattern::User(pattern, absolute)
})
.collect()
}),
line_length: options.line_length,
tab_size: options.tab_size,
namespace_packages: options
.namespace_packages
.map(|namespace_package| resolve_src(&namespace_package, project_root))
.transpose()?,
per_file_ignores: options.per_file_ignores.map(|per_file_ignores| {
per_file_ignores
.into_iter()
.map(|(pattern, prefixes)| {
PerFileIgnore::new(pattern, &prefixes, Some(project_root))
})
.collect()
}),
required_version: options.required_version,
respect_gitignore: options.respect_gitignore,
show_source: options.show_source,
show_fixes: options.show_fixes,
src: options
.src
.map(|src| resolve_src(&src, project_root))
.transpose()?,
target_version: options.target_version,
task_tags: options.task_tags,
logger_objects: options.logger_objects,
typing_modules: options.typing_modules,
// Plugins
flake8_annotations: options.flake8_annotations,
flake8_bandit: options.flake8_bandit,
flake8_bugbear: options.flake8_bugbear,
flake8_builtins: options.flake8_builtins,
flake8_comprehensions: options.flake8_comprehensions,
flake8_copyright: options.flake8_copyright,
flake8_errmsg: options.flake8_errmsg,
flake8_gettext: options.flake8_gettext,
flake8_implicit_str_concat: options.flake8_implicit_str_concat,
flake8_import_conventions: options.flake8_import_conventions,
flake8_pytest_style: options.flake8_pytest_style,
flake8_quotes: options.flake8_quotes,
flake8_self: options.flake8_self,
flake8_tidy_imports: options.flake8_tidy_imports,
flake8_type_checking: options.flake8_type_checking,
flake8_unused_arguments: options.flake8_unused_arguments,
isort: options.isort,
mccabe: options.mccabe,
pep8_naming: options.pep8_naming,
pycodestyle: options.pycodestyle,
pydocstyle: options.pydocstyle,
pyflakes: options.pyflakes,
pylint: options.pylint,
pyupgrade: options.pyupgrade,
})
}
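// A minimal end-to-end sketch (as an illustrative associated function) tying
// the pieces above together: read the `[tool.ruff]` options from a
// `pyproject.toml`, turn them into a `Configuration`, and resolve that into
// runnable settings. The `pyproject` path is a placeholder.
fn example_resolve(pyproject: &Path) -> Result<AllSettings> {
    let project_root = pyproject.parent().expect("pyproject.toml has a parent");
    let options = crate::pyproject::load_options(pyproject)?;
    let configuration = Configuration::from_options(options, project_root)?;
    configuration.into_all_settings(project_root)
}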
pub fn as_rule_table(&self) -> RuleTable {
// The select_set keeps track of which rules have been selected.
let mut select_set: RuleSet = defaults::PREFIXES.iter().flatten().collect();
// The fixable set keeps track of which rules are fixable.
let mut fixable_set: RuleSet = RuleSelector::All
.into_iter()
.chain(&RuleSelector::Nursery)
.collect();
// Ignores normally only subtract from the current set of selected
// rules. By that logic, the ignore in `select = [], ignore = ["E501"]`
// would have no effect. Instead, we carry over the ignores to the next
// selection in that case, creating a way for ignores to be reused
// across config files (which otherwise wouldn't be possible, since Ruff
// only has `extend` but no `extend-by`).
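// For example (schematically): a shared base config with
// `select = [], ignore = ["E501"]` that is pulled in via `extend` by a config
// with `select = ["E"]` still ends up with `E501` disabled, because the
// ignore is carried into the second selection.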
let mut carryover_ignores: Option<&[RuleSelector]> = None;
let mut carryover_unfixables: Option<&[RuleSelector]> = None;
let mut redirects = FxHashMap::default();
for selection in &self.rule_selections {
// If a selection only specifies `extend-select`, we cannot directly
// apply its rule selectors to the select_set, because we first have
// to resolve the effectively selected rules within the current rule selection
// (taking specificity into account, since more specific selectors take
// precedence over less specific selectors within a rule selection).
// We do this via the following HashMap, where the bool indicates
// whether to enable or disable the given rule.
let mut select_map_updates: FxHashMap<Rule, bool> = FxHashMap::default();
let mut fixable_map_updates: FxHashMap<Rule, bool> = FxHashMap::default();
let carriedover_ignores = carryover_ignores.take();
let carriedover_unfixables = carryover_unfixables.take();
for spec in Specificity::iter() {
// Iterate over rule selectors in order of specificity.
for selector in selection
.select
.iter()
.flatten()
.chain(selection.extend_select.iter())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
select_map_updates.insert(rule, true);
}
}
for selector in selection
.ignore
.iter()
.chain(carriedover_ignores.into_iter().flatten())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
select_map_updates.insert(rule, false);
}
}
// Apply the same logic to `fixable` and `unfixable`.
for selector in selection
.fixable
.iter()
.flatten()
.chain(selection.extend_fixable.iter())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
fixable_map_updates.insert(rule, true);
}
}
for selector in selection
.unfixable
.iter()
.chain(carriedover_unfixables.into_iter().flatten())
.filter(|s| s.specificity() == spec)
{
for rule in selector {
fixable_map_updates.insert(rule, false);
}
}
}
if let Some(select) = &selection.select {
// If the `select` option is given we reassign the whole select_set
// (overriding everything that has been defined previously).
select_set = select_map_updates
.into_iter()
.filter_map(|(rule, enabled)| enabled.then_some(rule))
.collect();
if select.is_empty()
&& selection.extend_select.is_empty()
&& !selection.ignore.is_empty()
{
carryover_ignores = Some(&selection.ignore);
}
} else {
// Otherwise we apply the updates on top of the existing select_set.
for (rule, enabled) in select_map_updates {
if enabled {
select_set.insert(rule);
} else {
select_set.remove(rule);
}
}
}
// Apply the same logic to `fixable` and `unfixable`.
if let Some(fixable) = &selection.fixable {
fixable_set = fixable_map_updates
.into_iter()
.filter_map(|(rule, enabled)| enabled.then_some(rule))
.collect();
if fixable.is_empty()
&& selection.extend_fixable.is_empty()
&& !selection.unfixable.is_empty()
{
carryover_unfixables = Some(&selection.unfixable);
}
} else {
for (rule, enabled) in fixable_map_updates {
if enabled {
fixable_set.insert(rule);
} else {
fixable_set.remove(rule);
}
}
}
// We insert redirects into the hashmap so that we
// can warn the users about remapped rule codes.
for selector in selection
.select
.iter()
.chain(selection.fixable.iter())
.flatten()
.chain(selection.ignore.iter())
.chain(selection.extend_select.iter())
.chain(selection.unfixable.iter())
.chain(selection.extend_fixable.iter())
{
if let RuleSelector::Prefix {
prefix,
redirected_from: Some(redirect_from),
} = selector
{
redirects.insert(redirect_from, prefix);
}
}
}
for (from, target) in redirects {
// TODO(martin): This belongs in the ruff_cli crate.
warn_user_once_by_id!(
from,
"`{from}` has been remapped to `{}{}`.",
target.linter().common_prefix(),
target.short_code()
);
}
let mut rules = RuleTable::empty();
for rule in select_set {
let fix = fixable_set.contains(rule);
rules.enable(rule, fix);
}
// If a docstring convention is specified, force-disable any incompatible error
// codes.
if let Some(convention) = self
.pydocstyle
.as_ref()
.and_then(|pydocstyle| pydocstyle.convention)
{
for rule in convention.rules_to_be_ignored() {
rules.disable(*rule);
}
}
// Validate that we didn't enable any incompatible rules. Use this awkward
// approach to give each pair its own `warn_user_once`.
for (preferred, expendable, message) in INCOMPATIBLE_CODES {
if rules.enabled(*preferred) && rules.enabled(*expendable) {
warn_user_once_by_id!(expendable.as_ref(), "{}", message);
rules.disable(*expendable);
}
}
rules
}
#[must_use]
pub fn combine(self, config: Self) -> Self {
Self {
rule_selections: config
.rule_selections
.into_iter()
.chain(self.rule_selections)
.collect(),
allowed_confusables: self.allowed_confusables.or(config.allowed_confusables),
builtins: self.builtins.or(config.builtins),
cache_dir: self.cache_dir.or(config.cache_dir),
dummy_variable_rgx: self.dummy_variable_rgx.or(config.dummy_variable_rgx),
exclude: self.exclude.or(config.exclude),
extend: self.extend.or(config.extend),
extend_exclude: config
.extend_exclude
.into_iter()
.chain(self.extend_exclude)
.collect(),
extend_include: config
.extend_include
.into_iter()
.chain(self.extend_include)
.collect(),
extend_per_file_ignores: config
.extend_per_file_ignores
.into_iter()
.chain(self.extend_per_file_ignores)
.collect(),
external: self.external.or(config.external),
fix: self.fix.or(config.fix),
fix_only: self.fix_only.or(config.fix_only),
format: self.format.or(config.format),
force_exclude: self.force_exclude.or(config.force_exclude),
include: self.include.or(config.include),
ignore_init_module_imports: self
.ignore_init_module_imports
.or(config.ignore_init_module_imports),
line_length: self.line_length.or(config.line_length),
logger_objects: self.logger_objects.or(config.logger_objects),
tab_size: self.tab_size.or(config.tab_size),
namespace_packages: self.namespace_packages.or(config.namespace_packages),
per_file_ignores: self.per_file_ignores.or(config.per_file_ignores),
required_version: self.required_version.or(config.required_version),
respect_gitignore: self.respect_gitignore.or(config.respect_gitignore),
show_source: self.show_source.or(config.show_source),
show_fixes: self.show_fixes.or(config.show_fixes),
src: self.src.or(config.src),
target_version: self.target_version.or(config.target_version),
task_tags: self.task_tags.or(config.task_tags),
typing_modules: self.typing_modules.or(config.typing_modules),
// Plugins
flake8_annotations: self.flake8_annotations.combine(config.flake8_annotations),
flake8_bandit: self.flake8_bandit.combine(config.flake8_bandit),
flake8_bugbear: self.flake8_bugbear.combine(config.flake8_bugbear),
flake8_builtins: self.flake8_builtins.combine(config.flake8_builtins),
flake8_comprehensions: self
.flake8_comprehensions
.combine(config.flake8_comprehensions),
flake8_copyright: self.flake8_copyright.combine(config.flake8_copyright),
flake8_errmsg: self.flake8_errmsg.combine(config.flake8_errmsg),
flake8_gettext: self.flake8_gettext.combine(config.flake8_gettext),
flake8_implicit_str_concat: self
.flake8_implicit_str_concat
.combine(config.flake8_implicit_str_concat),
flake8_import_conventions: self
.flake8_import_conventions
.combine(config.flake8_import_conventions),
flake8_pytest_style: self.flake8_pytest_style.combine(config.flake8_pytest_style),
flake8_quotes: self.flake8_quotes.combine(config.flake8_quotes),
flake8_self: self.flake8_self.combine(config.flake8_self),
flake8_tidy_imports: self.flake8_tidy_imports.combine(config.flake8_tidy_imports),
flake8_type_checking: self
.flake8_type_checking
.combine(config.flake8_type_checking),
flake8_unused_arguments: self
.flake8_unused_arguments
.combine(config.flake8_unused_arguments),
isort: self.isort.combine(config.isort),
mccabe: self.mccabe.combine(config.mccabe),
pep8_naming: self.pep8_naming.combine(config.pep8_naming),
pycodestyle: self.pycodestyle.combine(config.pycodestyle),
pydocstyle: self.pydocstyle.combine(config.pydocstyle),
pyflakes: self.pyflakes.combine(config.pyflakes),
pylint: self.pylint.combine(config.pylint),
pyupgrade: self.pyupgrade.combine(config.pyupgrade),
}
}
}
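// A small sketch of the precedence that `combine` implements: scalar options
// from `self` (here `child`, typically the more specific configuration) win
// over `config`, while rule selections and the `extend_*` lists are chained
// with the other configuration's entries first.
fn example_combine(child: Configuration, parent: Configuration) -> Configuration {
    // If both set `line_length`, the child's value is kept; the parent's rule
    // selections are applied before the child's.
    child.combine(parent)
}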
pub(crate) trait CombinePluginOptions {
#[must_use]
fn combine(self, other: Self) -> Self;
}
impl<T: CombinePluginOptions> CombinePluginOptions for Option<T> {
fn combine(self, other: Self) -> Self {
match (self, other) {
(Some(base), Some(other)) => Some(base.combine(other)),
(Some(base), None) => Some(base),
(None, Some(other)) => Some(other),
(None, None) => None,
}
}
}
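// How a plugin's options type plugs into this trait, using a hypothetical
// `ExampleOptions` rather than one of the real plugin option structs: each
// field falls back to `other` when unset, mirroring the scalar fields in
// `Configuration::combine`.
struct ExampleOptions {
    max_widgets: Option<usize>,
}

impl CombinePluginOptions for ExampleOptions {
    fn combine(self, other: Self) -> Self {
        Self {
            max_widgets: self.max_widgets.or(other.max_widgets),
        }
    }
}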
/// Given a list of source paths, which could include glob patterns, resolve the
/// matching paths.
pub fn resolve_src(src: &[String], project_root: &Path) -> Result<Vec<PathBuf>> {
let expansions = src
.iter()
.map(shellexpand::full)
.collect::<Result<Vec<Cow<'_, str>>, LookupError<VarError>>>()?;
let globs = expansions
.iter()
.map(|path| Path::new(path.as_ref()))
.map(|path| fs::normalize_path_to(path, project_root))
.map(|path| glob(&path.to_string_lossy()))
.collect::<Result<Vec<Paths>, PatternError>>()?;
let paths: Vec<PathBuf> = globs
.into_iter()
.flatten()
.collect::<Result<Vec<PathBuf>, GlobError>>()?;
Ok(paths)
}
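// Usage sketch: expand environment variables and glob patterns relative to the
// project root. The patterns here are arbitrary examples.
fn example_resolve_src(project_root: &Path) -> Result<Vec<PathBuf>> {
    resolve_src(
        &["src".to_string(), "python/**/src".to_string()],
        project_root,
    )
}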
#[cfg(test)]
mod tests {
use crate::configuration::{Configuration, RuleSelection};
use ruff::codes::Pycodestyle;
use ruff::registry::{Rule, RuleSet};
#[allow(clippy::needless_pass_by_value)]
fn resolve_rules(selections: impl IntoIterator<Item = RuleSelection>) -> RuleSet {
Configuration {
rule_selections: selections.into_iter().collect(),
..Configuration::default()
}
.as_rule_table()
.iter_enabled()
.collect()
}
#[test]
fn rule_codes() {
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
..RuleSelection::default()
}]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::MissingNewlineAtEndOfFile,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W6.into()]),
..RuleSelection::default()
}]);
let expected = RuleSet::from_rule(Rule::InvalidEscapeSequence);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W292.into()]),
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::from_rule(Rule::MissingNewlineAtEndOfFile);
assert_eq!(actual, expected);
let actual = resolve_rules([RuleSelection {
select: Some(vec![Pycodestyle::W605.into()]),
ignore: vec![Pycodestyle::W605.into()],
..RuleSelection::default()
}]);
let expected = RuleSet::empty();
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::MissingNewlineAtEndOfFile,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
extend_select: vec![Pycodestyle::W292.into()],
ignore: vec![Pycodestyle::W.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rule(Rule::MissingNewlineAtEndOfFile);
assert_eq!(actual, expected);
}
#[test]
fn carry_over_ignore() {
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::DocLineTooLong,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
let actual = resolve_rules([
RuleSelection {
select: Some(vec![]),
ignore: vec![Pycodestyle::W292.into()],
..RuleSelection::default()
},
RuleSelection {
select: Some(vec![Pycodestyle::W.into()]),
ignore: vec![Pycodestyle::W505.into()],
..RuleSelection::default()
},
]);
let expected = RuleSet::from_rules(&[
Rule::TrailingWhitespace,
Rule::BlankLineWithWhitespace,
Rule::InvalidEscapeSequence,
Rule::TabIndentation,
]);
assert_eq!(actual, expected);
}
}

View file

@ -0,0 +1,15 @@
pub mod configuration;
pub mod options;
pub mod pyproject;
pub mod resolver;
pub mod options_base;
#[cfg(test)]
mod tests {
use std::path::Path;
pub(crate) fn test_resource_path(path: impl AsRef<Path>) -> std::path::PathBuf {
Path::new("../ruff/resources/test/").join(path)
}
}
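// A rough sketch of the call sequence a consumer such as `ruff_cli` is
// expected to go through (written with crate-local paths): find the closest
// `pyproject.toml`, then resolve it into settings with a `ConfigProcessor`
// that applies no overrides.
fn example_load_settings(path: &std::path::Path) -> anyhow::Result<Option<ruff::settings::AllSettings>> {
    use crate::configuration::Configuration;
    use crate::pyproject::find_settings_toml;
    use crate::resolver::{resolve_settings_with_processor, ConfigProcessor, Relativity};

    struct NoOverrides;

    impl ConfigProcessor for NoOverrides {
        fn process_config(&self, _config: &mut Configuration) {}
    }

    let Some(pyproject) = find_settings_toml(path)? else {
        return Ok(None);
    };
    let settings =
        resolve_settings_with_processor(&pyproject, Relativity::Parent, &NoOverrides)?;
    Ok(Some(settings))
}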

File diff suppressed because it is too large

View file

@ -35,7 +35,7 @@ impl OptionGroup {
/// ### Find a direct child
///
/// ```rust
/// # use ruff::settings::options_base::{OptionGroup, OptionEntry, OptionField};
/// # use ruff_workspace::options_base::{OptionGroup, OptionEntry, OptionField};
///
/// const options: [(&'static str, OptionEntry); 2] = [
/// ("ignore_names", OptionEntry::Field(OptionField {
@ -71,7 +71,7 @@ impl OptionGroup {
/// ### Find a nested option
///
/// ```rust
/// # use ruff::settings::options_base::{OptionGroup, OptionEntry, OptionField};
/// # use ruff_workspace::options_base::{OptionGroup, OptionEntry, OptionField};
///
/// const ignore_options: [(&'static str, OptionEntry); 2] = [
/// ("names", OptionEntry::Field(OptionField {

View file

@ -2,20 +2,25 @@
use std::path::{Path, PathBuf};
use crate::options::Options;
use anyhow::Result;
use log::debug;
use pep440_rs::VersionSpecifiers;
use ruff::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
use crate::flake8_to_ruff::pep621::Project;
use crate::settings::options::Options;
use crate::settings::types::PythonVersion;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
struct Tools {
ruff: Option<Options>,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
struct Project {
#[serde(alias = "requires-python", alias = "requires_python")]
requires_python: Option<VersionSpecifiers>,
}
#[derive(Debug, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct Pyproject {
tool: Option<Tools>,
project: Option<Project>,
@ -147,18 +152,18 @@ pub fn load_options<P: AsRef<Path>>(path: P) -> Result<Options> {
mod tests {
use std::str::FromStr;
use crate::options::Options;
use crate::pyproject::{find_settings_toml, parse_pyproject_toml, Pyproject, Tools};
use crate::tests::test_resource_path;
use anyhow::Result;
use ruff::codes;
use ruff::codes::RuleCodePrefix;
use ruff::line_width::LineLength;
use ruff::settings::types::PatternPrefixPair;
use rustc_hash::FxHashMap;
use crate::codes::{self, RuleCodePrefix};
use crate::line_width::LineLength;
use crate::settings::pyproject::{
find_settings_toml, parse_pyproject_toml, Options, Pyproject, Tools,
};
use crate::settings::types::PatternPrefixPair;
use crate::test::test_resource_path;
#[test]
fn deserialize() -> Result<()> {
let pyproject: Pyproject = toml::from_str(r#""#)?;
assert_eq!(pyproject.tool, None);

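// A small companion sketch to the `deserialize` test above: an empty
// `[tool.ruff]` table still deserializes, yielding a `Tools` entry whose
// options are all unset.
#[test]
fn deserialize_empty_tool_ruff() -> Result<()> {
    let pyproject: Pyproject = toml::from_str(
        r#"
[tool.ruff]
"#,
    )?;
    assert!(pyproject.tool.is_some());
    Ok(())
}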
View file

@ -5,17 +5,20 @@ use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::RwLock;
use anyhow::{anyhow, bail, Result};
use anyhow::Result;
use anyhow::{anyhow, bail};
use ignore::{DirEntry, WalkBuilder, WalkState};
use itertools::Itertools;
use log::debug;
use path_absolutize::path_dedot;
use rustc_hash::FxHashSet;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::fs;
use crate::settings::configuration::Configuration;
use crate::settings::pyproject::settings_toml;
use crate::settings::{pyproject, AllSettings, Settings};
use crate::configuration::Configuration;
use crate::pyproject;
use crate::pyproject::settings_toml;
use ruff::fs;
use ruff::packaging::is_package;
use ruff::settings::{AllSettings, Settings};
/// The configuration information from a `pyproject.toml` file.
pub struct PyprojectConfig {
@ -45,7 +48,7 @@ impl PyprojectConfig {
/// The strategy used to discover the relevant `pyproject.toml` file for each
/// Python file.
#[derive(Debug, is_macro::Is)]
#[derive(Debug, Copy, Clone)]
pub enum PyprojectDiscoveryStrategy {
/// Use a fixed `pyproject.toml` file for all Python files (i.e., one
/// provided on the command-line).
@ -55,6 +58,16 @@ pub enum PyprojectDiscoveryStrategy {
Hierarchical,
}
impl PyprojectDiscoveryStrategy {
pub const fn is_fixed(self) -> bool {
matches!(self, PyprojectDiscoveryStrategy::Fixed)
}
pub const fn is_hierarchical(self) -> bool {
matches!(self, PyprojectDiscoveryStrategy::Hierarchical)
}
}
/// The strategy for resolving file paths in a `pyproject.toml`.
#[derive(Copy, Clone)]
pub enum Relativity {
@ -66,7 +79,7 @@ pub enum Relativity {
}
impl Relativity {
pub fn resolve(&self, path: &Path) -> PathBuf {
pub fn resolve(self, path: &Path) -> PathBuf {
match self {
Relativity::Parent => path
.parent()
@ -84,7 +97,7 @@ pub struct Resolver {
impl Resolver {
/// Add a resolved [`Settings`] under a given [`PathBuf`] scope.
pub fn add(&mut self, path: PathBuf, settings: AllSettings) {
fn add(&mut self, path: PathBuf, settings: AllSettings) {
self.settings.insert(path, settings);
}
@ -113,20 +126,76 @@ impl Resolver {
&self.resolve_all(path, pyproject_config).lib
}
/// Return a mapping from Python package to its package root.
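/// For example, given the (hypothetical) files `proj/pkg/a.py` and
/// `proj/pkg/sub/__init__.py`, where `proj/pkg` and `proj/pkg/sub` contain an
/// `__init__.py` but `proj` does not, both files' parent directories map to
/// the package root `proj/pkg`.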
pub fn package_roots<'a>(
&'a self,
files: &[&'a Path],
pyproject_config: &'a PyprojectConfig,
) -> FxHashMap<&'a Path, Option<&'a Path>> {
// Pre-populate the package cache, since the list of files could (but isn't
// required to) contain some `__init__.py` files.
let mut package_cache: FxHashMap<&Path, bool> = FxHashMap::default();
for file in files {
if file.ends_with("__init__.py") {
if let Some(parent) = file.parent() {
package_cache.insert(parent, true);
}
}
}
// Search for the package root for each file.
let mut package_roots: FxHashMap<&Path, Option<&Path>> = FxHashMap::default();
for file in files {
let namespace_packages = &self.resolve(file, pyproject_config).namespace_packages;
if let Some(package) = file.parent() {
if package_roots.contains_key(package) {
continue;
}
package_roots.insert(
package,
detect_package_root_with_cache(package, namespace_packages, &mut package_cache),
);
}
}
package_roots
}
/// Return an iterator over the resolved [`Settings`] in this [`Resolver`].
pub fn iter(&self) -> impl Iterator<Item = &AllSettings> {
pub fn settings(&self) -> impl Iterator<Item = &AllSettings> {
self.settings.values()
}
}
pub trait ConfigProcessor: Sync {
fn process_config(&self, config: &mut Configuration);
/// A wrapper around `detect_package_root` to cache filesystem lookups.
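/// For a hypothetical layout in which `proj/pkg/sub` and `proj/pkg` contain an
/// `__init__.py` but `proj` does not, calling this with `proj/pkg/sub` walks
/// the ancestors and returns `Some("proj/pkg")`.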
fn detect_package_root_with_cache<'a>(
path: &'a Path,
namespace_packages: &'a [PathBuf],
package_cache: &mut FxHashMap<&'a Path, bool>,
) -> Option<&'a Path> {
let mut current = None;
for parent in path.ancestors() {
if !is_package_with_cache(parent, namespace_packages, package_cache) {
return current;
}
current = Some(parent);
}
current
}
struct NoOpProcessor;
/// A wrapper around `is_package` to cache filesystem lookups.
fn is_package_with_cache<'a>(
path: &'a Path,
namespace_packages: &'a [PathBuf],
package_cache: &mut FxHashMap<&'a Path, bool>,
) -> bool {
*package_cache
.entry(path)
.or_insert_with(|| is_package(path, namespace_packages))
}
impl ConfigProcessor for NoOpProcessor {
fn process_config(&self, _config: &mut Configuration) {}
pub trait ConfigProcessor: Sync {
fn process_config(&self, config: &mut Configuration);
}
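// A sketch of a processor that layers command-line overrides on top of the
// file-based configuration. `ruff_cli` is expected to provide its own
// processor; this particular struct is illustrative only.
struct ExampleCliOverrides {
    fix: Option<bool>,
    respect_gitignore: Option<bool>,
}

impl ConfigProcessor for ExampleCliOverrides {
    fn process_config(&self, config: &mut Configuration) {
        if let Some(fix) = self.fix {
            config.fix = Some(fix);
        }
        if let Some(respect_gitignore) = self.respect_gitignore {
            config.respect_gitignore = Some(respect_gitignore);
        }
    }
}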
/// Recursively resolve a [`Configuration`] from a `pyproject.toml` file at the
@ -135,9 +204,9 @@ impl ConfigProcessor for NoOpProcessor {
// configuration file extends another in the same path, we'll re-parse the same
// file at least twice (possibly more than twice, since we'll also parse it when
// resolving the "default" configuration).
pub fn resolve_configuration(
fn resolve_configuration(
pyproject: &Path,
relativity: &Relativity,
relativity: Relativity,
processor: &dyn ConfigProcessor,
) -> Result<Configuration> {
let mut seen = FxHashSet::default();
@ -181,14 +250,14 @@ pub fn resolve_configuration(
/// Extract the project root (scope) and [`Settings`] from a given
/// `pyproject.toml`.
pub fn resolve_scoped_settings(
fn resolve_scoped_settings(
pyproject: &Path,
relativity: &Relativity,
relativity: Relativity,
processor: &dyn ConfigProcessor,
) -> Result<(PathBuf, AllSettings)> {
let configuration = resolve_configuration(pyproject, relativity, processor)?;
let project_root = relativity.resolve(pyproject);
let settings = AllSettings::from_configuration(configuration, &project_root)?;
let settings = configuration.into_all_settings(&project_root)?;
Ok((project_root, settings))
}
@ -196,23 +265,13 @@ pub fn resolve_scoped_settings(
/// configuration with the given [`ConfigProcessor`].
pub fn resolve_settings_with_processor(
pyproject: &Path,
relativity: &Relativity,
relativity: Relativity,
processor: &dyn ConfigProcessor,
) -> Result<AllSettings> {
let (_project_root, settings) = resolve_scoped_settings(pyproject, relativity, processor)?;
Ok(settings)
}
/// Return `true` if the given file should be ignored based on the exclusion
/// criteria.
fn match_exclusion<P: AsRef<Path>, R: AsRef<Path>>(
file_path: P,
file_basename: R,
exclusion: &globset::GlobSet,
) -> bool {
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
}
/// Find all Python files (`.py`, `.pyi`, and `.ipynb`) in a set of paths.
pub fn python_files_in_path(
paths: &[PathBuf],
@ -231,7 +290,7 @@ pub fn python_files_in_path(
if seen.insert(ancestor) {
if let Some(pyproject) = settings_toml(ancestor)? {
let (root, settings) =
resolve_scoped_settings(&pyproject, &Relativity::Parent, processor)?;
resolve_scoped_settings(&pyproject, Relativity::Parent, processor)?;
resolver.add(root, settings);
}
}
@ -303,7 +362,7 @@ pub fn python_files_in_path(
match settings_toml(entry.path()) {
Ok(Some(pyproject)) => match resolve_scoped_settings(
&pyproject,
&Relativity::Parent,
Relativity::Parent,
processor,
) {
Ok((root, settings)) => {
@ -378,7 +437,7 @@ pub fn python_file_at_path(
for ancestor in path.ancestors() {
if let Some(pyproject) = settings_toml(ancestor)? {
let (root, settings) =
resolve_scoped_settings(&pyproject, &Relativity::Parent, processor)?;
resolve_scoped_settings(&pyproject, Relativity::Parent, processor)?;
resolver.add(root, settings);
}
}
@ -423,6 +482,16 @@ fn is_file_excluded(
false
}
/// Return `true` if the given file should be ignored based on the exclusion
/// criteria.
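/// Both the full path and the basename are checked so that bare filename
/// patterns (e.g. `ignored.py`) can exclude a file anywhere in the tree,
/// while root-anchored patterns (e.g. `<project>/resources/*`) exclude by
/// location.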
fn match_exclusion<P: AsRef<Path>, R: AsRef<Path>>(
file_path: P,
file_basename: R,
exclusion: &globset::GlobSet,
) -> bool {
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
}
#[cfg(test)]
mod tests {
use std::fs::{create_dir, File};
@ -434,14 +503,92 @@ mod tests {
use path_absolutize::Absolutize;
use tempfile::TempDir;
use crate::configuration::Configuration;
use crate::pyproject::find_settings_toml;
use ruff::settings::types::FilePattern;
use ruff::settings::AllSettings;
use crate::resolver::{
is_file_excluded, match_exclusion, python_files_in_path, resolve_settings_with_processor,
NoOpProcessor, PyprojectConfig, PyprojectDiscoveryStrategy, Relativity, Resolver,
ConfigProcessor, PyprojectConfig, PyprojectDiscoveryStrategy, Relativity, Resolver,
};
use crate::settings::pyproject::find_settings_toml;
use crate::settings::types::FilePattern;
use crate::settings::AllSettings;
use crate::test::test_resource_path;
use crate::tests::test_resource_path;
struct NoOpProcessor;
impl ConfigProcessor for NoOpProcessor {
fn process_config(&self, _config: &mut Configuration) {}
}
#[test]
fn rooted_exclusion() -> Result<()> {
let package_root = test_resource_path("package");
let resolver = Resolver::default();
let pyproject_config = PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
resolve_settings_with_processor(
&find_settings_toml(&package_root)?.unwrap(),
Relativity::Parent,
&NoOpProcessor,
)?,
None,
);
// src/app.py should not be excluded even if it lives in a hierarchy that should
// be excluded by virtue of the pyproject.toml having `resources/*` in
// it.
assert!(!is_file_excluded(
&package_root.join("src/app.py"),
&resolver,
&pyproject_config,
));
// However, resources/ignored.py should be ignored, since that `resources`
// directory is beneath the package root.
assert!(is_file_excluded(
&package_root.join("resources/ignored.py"),
&resolver,
&pyproject_config,
));
Ok(())
}
#[test]
fn find_python_files() -> Result<()> {
// Initialize the filesystem:
// root
// ├── file1.py
// ├── dir1.py
// │ └── file2.py
// └── dir2.py
let tmp_dir = TempDir::new()?;
let root = tmp_dir.path();
let file1 = root.join("file1.py");
let dir1 = root.join("dir1.py");
let file2 = dir1.join("file2.py");
let dir2 = root.join("dir2.py");
File::create(&file1)?;
create_dir(dir1)?;
File::create(&file2)?;
create_dir(dir2)?;
let (paths, _) = python_files_in_path(
&[root.to_path_buf()],
&PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
AllSettings::default(),
None,
),
&NoOpProcessor,
)?;
let paths = paths
.iter()
.flatten()
.map(ignore::DirEntry::path)
.sorted()
.collect::<Vec<_>>();
assert_eq!(paths, &[file2, file1]);
Ok(())
}
fn make_exclusion(file_pattern: FilePattern) -> GlobSet {
let mut builder = globset::GlobSetBuilder::new();
@ -573,74 +720,4 @@ mod tests {
&make_exclusion(exclude),
));
}
#[test]
fn rooted_exclusion() -> Result<()> {
let package_root = test_resource_path("package");
let resolver = Resolver::default();
let pyproject_config = PyprojectConfig::new(
PyprojectDiscoveryStrategy::Hierarchical,
resolve_settings_with_processor(
&find_settings_toml(&package_root)?.unwrap(),
&Relativity::Parent,
&NoOpProcessor,
)?,
None,
);
// src/app.py should not be excluded even if it lives in a hierarchy that should
// be excluded by virtue of the pyproject.toml having `resources/*` in
// it.
assert!(!is_file_excluded(
&package_root.join("src/app.py"),
&resolver,
&pyproject_config,
));
// However, resources/ignored.py should be ignored, since that `resources`
// directory is beneath the package root.
assert!(is_file_excluded(
&package_root.join("resources/ignored.py"),
&resolver,
&pyproject_config,
));
Ok(())
}
#[test]
fn find_python_files() -> Result<()> {
// Initialize the filesystem:
// root
// ├── file1.py
// ├── dir1.py
// │ └── file2.py
// └── dir2.py
let tmp_dir = TempDir::new()?;
let root = tmp_dir.path();
let file1 = root.join("file1.py");
let dir1 = root.join("dir1.py");
let file2 = dir1.join("file2.py");
let dir2 = root.join("dir2.py");
File::create(&file1)?;
create_dir(dir1)?;
File::create(&file2)?;
create_dir(dir2)?;
let (paths, _) = python_files_in_path(
&[root.to_path_buf()],
&PyprojectConfig::new(
PyprojectDiscoveryStrategy::Fixed,
AllSettings::default(),
None,
),
&NoOpProcessor,
)?;
let paths = paths
.iter()
.flatten()
.map(ignore::DirEntry::path)
.sorted()
.collect::<Vec<_>>();
assert_eq!(paths, &[file2, file1]);
Ok(())
}
}

8
ruff.schema.json generated
View file

@ -445,7 +445,7 @@
"description": "Options for the `pycodestyle` plugin.",
"anyOf": [
{
"$ref": "#/definitions/Pycodestyle"
"$ref": "#/definitions/PycodestyleOptions"
},
{
"type": "null"
@ -456,7 +456,7 @@
"description": "Options for the `pydocstyle` plugin.",
"anyOf": [
{
"$ref": "#/definitions/Pydocstyle"
"$ref": "#/definitions/PydocstyleOptions"
},
{
"type": "null"
@ -1487,7 +1487,7 @@
},
"additionalProperties": false
},
"Pycodestyle": {
"PycodestyleOptions": {
"type": "object",
"properties": {
"ignore-overlong-task-comments": {
@ -1511,7 +1511,7 @@
},
"additionalProperties": false
},
"Pydocstyle": {
"PydocstyleOptions": {
"type": "object",
"properties": {
"convention": {