Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-28 04:44:51 +00:00)

Convert more BTree usages to Fx (#1112)

This commit is contained in:
parent bb67fbb73a
commit 35fa2a3c32

11 changed files with 82 additions and 62 deletions
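The hunks below replace the ordered std::collections::BTreeMap/BTreeSet containers with FxHashMap/FxHashSet from the rustc-hash crate, which are typically faster to hash into but iterate in arbitrary order and are constructed with default()/from_iter() rather than new()/from(). A minimal standalone sketch of the pattern (the values here are illustrative, not taken from this diff):

    use rustc_hash::{FxHashMap, FxHashSet};

    fn main() {
        // The Fx types use a non-default hasher, so construct them with
        // `default()` or `from_iter(...)` instead of `new()` or `from(...)`.
        let mut aliases: FxHashMap<String, String> = FxHashMap::from_iter([
            ("numpy".to_string(), "np".to_string()),
            ("pandas".to_string(), "pd".to_string()),
        ]);
        aliases.insert("seaborn".to_string(), "sns".to_string());

        let mut seen: FxHashSet<&str> = FxHashSet::default();
        seen.insert("numpy");

        // Unlike BTreeMap, iteration order is arbitrary: sort when order matters.
        let mut keys: Vec<&String> = aliases.keys().collect();
        keys.sort();
        println!("{keys:?} ({} seen)", seen.len());
    }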
@@ -1,6 +1,5 @@
-use std::collections::BTreeSet;
-
 use itertools::Itertools;
+use rustc_hash::FxHashSet;
 use rustpython_ast::{
     Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKind, Location, Stmt,
 };
@@ -27,9 +26,9 @@ fn duplicate_handler_exceptions<'a>(
     checker: &mut Checker,
     expr: &'a Expr,
     elts: &'a [Expr],
-) -> BTreeSet<Vec<&'a str>> {
-    let mut seen: BTreeSet<Vec<&str>> = BTreeSet::default();
-    let mut duplicates: BTreeSet<Vec<&str>> = BTreeSet::default();
+) -> FxHashSet<Vec<&'a str>> {
+    let mut seen: FxHashSet<Vec<&str>> = FxHashSet::default();
+    let mut duplicates: FxHashSet<Vec<&str>> = FxHashSet::default();
     let mut unique_elts: Vec<&Expr> = Vec::default();
     for type_ in elts {
         let call_path = helpers::collect_call_paths(type_);
@@ -76,8 +75,8 @@ fn duplicate_handler_exceptions<'a>(
 }
 
 pub fn duplicate_exceptions(checker: &mut Checker, stmt: &Stmt, handlers: &[Excepthandler]) {
-    let mut seen: BTreeSet<Vec<&str>> = BTreeSet::default();
-    let mut duplicates: BTreeSet<Vec<&str>> = BTreeSet::default();
+    let mut seen: FxHashSet<Vec<&str>> = FxHashSet::default();
+    let mut duplicates: FxHashSet<Vec<&str>> = FxHashSet::default();
     for handler in handlers {
         let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
             continue;

@@ -1,5 +1,4 @@
-use std::collections::BTreeMap;
-
+use rustc_hash::FxHashMap;
 use rustpython_ast::Stmt;
 
 use crate::ast::types::Range;
@@ -10,7 +9,7 @@ pub fn check_conventional_import(
     import_from: &Stmt,
     name: &str,
     asname: Option<&str>,
-    conventions: &BTreeMap<String, String>,
+    conventions: &FxHashMap<String, String>,
 ) -> Option<Check> {
     let mut is_valid_import = true;
     if let Some(expected_alias) = conventions.get(name) {

@@ -3,10 +3,11 @@ pub mod settings;
 
 #[cfg(test)]
 mod tests {
-    use std::collections::BTreeMap;
     use std::path::Path;
 
     use anyhow::Result;
+    use rustc_hash::FxHashMap;
 
     use crate::checks::CheckCode;
     use crate::linter::test_path;
@@ -33,7 +34,7 @@ mod tests {
             flake8_import_conventions::settings::Settings::from_options(
                 flake8_import_conventions::settings::Options {
                     aliases: None,
-                    extend_aliases: Some(BTreeMap::from([
+                    extend_aliases: Some(FxHashMap::from_iter([
                         ("dask.array".to_string(), "da".to_string()),
                         ("dask.dataframe".to_string(), "dd".to_string()),
                     ])),
@@ -56,7 +57,7 @@ mod tests {
             flake8_import_conventions:
                 flake8_import_conventions::settings::Settings::from_options(
                     flake8_import_conventions::settings::Options {
-                        aliases: Some(BTreeMap::from([
+                        aliases: Some(FxHashMap::from_iter([
                             ("altair".to_string(), "alt".to_string()),
                             ("matplotlib.pyplot".to_string(), "plt".to_string()),
                             ("pandas".to_string(), "pd".to_string()),
@@ -83,7 +84,7 @@ mod tests {
             flake8_import_conventions::settings::Settings::from_options(
                 flake8_import_conventions::settings::Options {
                     aliases: None,
-                    extend_aliases: Some(BTreeMap::from([(
+                    extend_aliases: Some(FxHashMap::from_iter([(
                         "numpy".to_string(),
                         "nmp".to_string(),
                     )])),

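The tests above also switch from `BTreeMap::from([...])` to `FxHashMap::from_iter([...])`: the array-based `From` constructor on the standard HashMap is only provided for the default RandomState hasher, so a map with a custom hasher such as FxHashMap is built through FromIterator instead. A small sketch of the difference (assuming the rustc-hash crate; the key/value pair is illustrative):

    use std::collections::BTreeMap;

    use rustc_hash::FxHashMap;

    fn main() {
        // BTreeMap implements From<[(K, V); N]> directly...
        let ordered: BTreeMap<String, String> =
            BTreeMap::from([("pandas".to_string(), "pd".to_string())]);

        // ...but FxHashMap uses a non-default hasher, so it is collected via FromIterator.
        let hashed: FxHashMap<String, String> =
            FxHashMap::from_iter([("pandas".to_string(), "pd".to_string())]);

        assert_eq!(ordered.len(), hashed.len());
    }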
@@ -1,8 +1,10 @@
 //! Settings for import conventions.
 
-use std::collections::BTreeMap;
+use std::hash::{Hash, Hasher};
 
+use itertools::Itertools;
 use ruff_macros::ConfigurationOptions;
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 
 const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
@@ -20,7 +22,7 @@ pub struct Options {
         doc = "The conventional aliases for imports. These aliases can be extended by the \
                `extend_aliases` option.",
         default = r#"{"altair": "alt", "matplotlib.pyplot": "plt", "numpy": "np", "pandas": "pd", "seaborn": "sns"}"#,
-        value_type = "BTreeMap<String, String>",
+        value_type = "FxHashMap<String, String>",
         example = r#"
             # Declare the default aliases.
             altair = "alt"
@@ -30,33 +32,41 @@ pub struct Options {
             seaborn = "sns"
         "#
     )]
-    pub aliases: Option<BTreeMap<String, String>>,
+    pub aliases: Option<FxHashMap<String, String>>,
     #[option(
         doc = "A mapping of modules to their conventional import aliases. These aliases will be \
                added to the `aliases` mapping.",
         default = r#"{}"#,
-        value_type = "BTreeMap<String, String>",
+        value_type = "FxHashMap<String, String>",
         example = r#"
             # Declare a custom alias for the `matplotlib` module.
            "dask.dataframe" = "dd"
         "#
     )]
-    pub extend_aliases: Option<BTreeMap<String, String>>,
+    pub extend_aliases: Option<FxHashMap<String, String>>,
 }
 
-#[derive(Debug, Hash)]
+#[derive(Debug)]
 pub struct Settings {
-    pub aliases: BTreeMap<String, String>,
+    pub aliases: FxHashMap<String, String>,
 }
 
-fn default_aliases() -> BTreeMap<String, String> {
+impl Hash for Settings {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        for value in self.aliases.iter().sorted() {
+            value.hash(state);
+        }
+    }
+}
+
+fn default_aliases() -> FxHashMap<String, String> {
     CONVENTIONAL_ALIASES
         .iter()
         .map(|(k, v)| ((*k).to_string(), (*v).to_string()))
-        .collect::<BTreeMap<_, _>>()
+        .collect::<FxHashMap<_, _>>()
 }
 
-fn resolve_aliases(options: Options) -> BTreeMap<String, String> {
+fn resolve_aliases(options: Options) -> FxHashMap<String, String> {
     let mut aliases = match options.aliases {
         Some(options_aliases) => options_aliases,
         None => default_aliases(),
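Settings now holds an FxHashMap, which does not implement Hash and iterates in arbitrary order, so the hunk above drops #[derive(Hash)] and hashes the entries in sorted order to keep the hash stable for equal settings. A standalone sketch of the same idea (the struct and field names here are illustrative, not the ones in this file):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    use itertools::Itertools;
    use rustc_hash::FxHashMap;

    struct Aliases {
        map: FxHashMap<String, String>,
    }

    impl Hash for Aliases {
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Hash (key, value) pairs in sorted order so the result does not
            // depend on the map's internal bucket order.
            for entry in self.map.iter().sorted() {
                entry.hash(state);
            }
        }
    }

    fn main() {
        let aliases = Aliases {
            map: FxHashMap::from_iter([("numpy".to_string(), "np".to_string())]),
        };
        let mut hasher = DefaultHasher::new();
        aliases.hash(&mut hasher);
        println!("{:x}", hasher.finish());
    }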

@@ -1,5 +1,4 @@
 use std::borrow::Cow;
-use std::collections::BTreeSet;
 use std::fs::File;
 use std::io::{BufReader, Read};
 use std::path::{Path, PathBuf};
@@ -8,6 +7,7 @@ use anyhow::{anyhow, Result};
 use globset::GlobMatcher;
 use log::debug;
 use path_absolutize::{path_dedot, Absolutize};
+use rustc_hash::FxHashSet;
 use walkdir::{DirEntry, WalkDir};
 
 use crate::checks::CheckCode;
@@ -83,8 +83,8 @@ pub fn iter_python_files<'a>(
 /// Create tree set with codes matching the pattern/code pairs.
 pub(crate) fn ignores_from_path<'a>(
     path: &Path,
-    pattern_code_pairs: &'a [(GlobMatcher, GlobMatcher, BTreeSet<CheckCode>)],
-) -> Result<BTreeSet<&'a CheckCode>> {
+    pattern_code_pairs: &'a [(GlobMatcher, GlobMatcher, FxHashSet<CheckCode>)],
+) -> Result<FxHashSet<&'a CheckCode>> {
     let (file_path, file_basename) = extract_path_names(path)?;
     Ok(pattern_code_pairs
         .iter()

src/noqa.rs (22 changed lines)
@@ -1,4 +1,3 @@
-use std::collections::{BTreeMap, BTreeSet};
 use std::fs;
 use std::path::Path;
 
@@ -7,6 +6,7 @@ use itertools::Itertools;
 use nohash_hasher::IntMap;
 use once_cell::sync::Lazy;
 use regex::Regex;
+use rustc_hash::{FxHashMap, FxHashSet};
 
 use crate::checks::{Check, CheckCode, REDIRECTS};
@@ -83,7 +83,7 @@ pub fn add_noqa(
     checks: &[Check],
     contents: &str,
     noqa_line_for: &IntMap<usize, usize>,
-    external: &BTreeSet<String>,
+    external: &FxHashSet<String>,
 ) -> Result<usize> {
     let (count, output) = add_noqa_inner(checks, contents, noqa_line_for, external);
     fs::write(path, output)?;
@@ -94,16 +94,16 @@ fn add_noqa_inner(
     checks: &[Check],
     contents: &str,
     noqa_line_for: &IntMap<usize, usize>,
-    external: &BTreeSet<String>,
+    external: &FxHashSet<String>,
 ) -> (usize, String) {
-    let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
+    let mut matches_by_line: FxHashMap<usize, FxHashSet<&CheckCode>> = FxHashMap::default();
     for (lineno, line) in contents.lines().enumerate() {
         // If we hit an exemption for the entire file, bail.
         if is_file_exempt(line) {
             return (0, contents.to_string());
         }
 
-        let mut codes: BTreeSet<&CheckCode> = BTreeSet::new();
+        let mut codes: FxHashSet<&CheckCode> = FxHashSet::default();
         for check in checks {
             if check.location.row() == lineno + 1 {
                 codes.insert(check.kind.code());
@@ -117,7 +117,7 @@ fn add_noqa_inner(
 
         if !codes.is_empty() {
             let matches = matches_by_line.entry(noqa_lineno).or_default();
-            matches.append(&mut codes);
+            matches.extend(codes);
         }
     }
 
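BTreeSet has an `append` method that drains another BTreeSet, but HashSet does not, so the hunk above moves the collected codes over with `extend`, which consumes the per-line `codes` set. A small sketch of the swap (the element values are illustrative):

    use rustc_hash::FxHashSet;

    fn main() {
        let mut matches: FxHashSet<u32> = FxHashSet::from_iter([1, 2]);
        let codes: FxHashSet<u32> = FxHashSet::from_iter([2, 3]);

        // `extend` takes any IntoIterator; passing the set by value moves its
        // elements into `matches`, and duplicates collapse as usual.
        matches.extend(codes);
        assert_eq!(matches.len(), 3);
    }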

@@ -199,9 +199,9 @@ fn add_noqa_inner(
 
 #[cfg(test)]
 mod tests {
-    use std::collections::BTreeSet;
-
     use nohash_hasher::IntMap;
+    use rustc_hash::FxHashSet;
     use rustpython_parser::ast::Location;
 
     use crate::ast::types::Range;
@@ -227,7 +227,7 @@ mod tests {
         let checks = vec![];
         let contents = "x = 1";
         let noqa_line_for = IntMap::default();
-        let external = BTreeSet::default();
+        let external = FxHashSet::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for, &external);
         assert_eq!(count, 0);
         assert_eq!(output.trim(), contents.trim());
@@ -241,7 +241,7 @@ mod tests {
         )];
         let contents = "x = 1";
         let noqa_line_for = IntMap::default();
-        let external = BTreeSet::default();
+        let external = FxHashSet::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for, &external);
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1 # noqa: F841".trim());
@@ -264,7 +264,7 @@ mod tests {
         ];
         let contents = "x = 1 # noqa: E741";
         let noqa_line_for = IntMap::default();
-        let external = BTreeSet::default();
+        let external = FxHashSet::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for, &external);
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
@@ -287,7 +287,7 @@ mod tests {
         ];
         let contents = "x = 1 # noqa";
         let noqa_line_for = IntMap::default();
-        let external = BTreeSet::default();
+        let external = FxHashSet::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for, &external);
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());

@@ -1,5 +1,3 @@
-use std::collections::BTreeSet;
-
 use itertools::Itertools;
 use once_cell::sync::Lazy;
 use regex::Regex;
@@ -1402,7 +1400,7 @@ fn missing_args(checker: &mut Checker, definition: &Definition, docstrings_args:
     };
 
     // Look for arguments that weren't included in the docstring.
-    let mut missing_arg_names: BTreeSet<String> = BTreeSet::default();
+    let mut missing_arg_names: FxHashSet<String> = FxHashSet::default();
     for arg in arguments
         .args
         .iter()

@@ -1,6 +1,6 @@
-use std::collections::BTreeSet;
-
+use itertools::Itertools;
 use log::error;
+use rustc_hash::FxHashSet;
 use rustpython_ast::{AliasData, Located};
 use rustpython_parser::ast::Stmt;
 
@@ -38,7 +38,7 @@ pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Lo
     let target_version = checker.settings.target_version;
 
     let mut removable_index: Vec<usize> = vec![];
-    let mut removable_names: BTreeSet<&str> = BTreeSet::new();
+    let mut removable_names: FxHashSet<&str> = FxHashSet::default();
     for (index, alias) in names.iter().enumerate() {
         let name = alias.node.name.as_str();
         if (target_version >= PythonVersion::Py33 && PY33_PLUS_REMOVE_FUTURES.contains(&name))
@@ -53,7 +53,13 @@ pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Lo
         return;
     }
     let mut check = Check::new(
-        CheckKind::UnnecessaryFutureImport(removable_names.into_iter().map(String::from).collect()),
+        CheckKind::UnnecessaryFutureImport(
+            removable_names
+                .into_iter()
+                .map(String::from)
+                .sorted()
+                .collect(),
+        ),
         Range::from_located(stmt),
     );
     if checker.patch(check.kind.code()) {
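With `removable_names` now a hash set, its iteration order can vary from run to run, so the hunk above sorts the names before collecting them into the check message to keep diagnostics deterministic. A tiny sketch of the idiom (the names are illustrative):

    use itertools::Itertools;
    use rustc_hash::FxHashSet;

    fn main() {
        let removable_names: FxHashSet<&str> =
            FxHashSet::from_iter(["nested_scopes", "generators"]);

        // Sort while collecting so downstream output is stable.
        let names: Vec<String> = removable_names
            .into_iter()
            .map(String::from)
            .sorted()
            .collect();
        assert_eq!(names, vec!["generators", "nested_scopes"]);
    }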

@@ -2,12 +2,12 @@
 //! command-line options. Structure is optimized for internal usage, as opposed
 //! to external visibility or parsing.
 
-use std::collections::BTreeSet;
 use std::hash::{Hash, Hasher};
 use std::path::{Path, PathBuf};
 
 use anyhow::Result;
 use globset::{Glob, GlobMatcher, GlobSet};
+use itertools::Itertools;
 use path_absolutize::path_dedot;
 use regex::Regex;
 use rustc_hash::FxHashSet;
@@ -34,12 +34,12 @@ pub struct Settings {
     pub enabled: FxHashSet<CheckCode>,
     pub exclude: GlobSet,
     pub extend_exclude: GlobSet,
-    pub external: BTreeSet<String>,
+    pub external: FxHashSet<String>,
     pub fixable: FxHashSet<CheckCode>,
     pub format: SerializationFormat,
     pub ignore_init_module_imports: bool,
     pub line_length: usize,
-    pub per_file_ignores: Vec<(GlobMatcher, GlobMatcher, BTreeSet<CheckCode>)>,
+    pub per_file_ignores: Vec<(GlobMatcher, GlobMatcher, FxHashSet<CheckCode>)>,
     pub show_source: bool,
     pub src: Vec<PathBuf>,
     pub target_version: PythonVersion,
@@ -77,7 +77,7 @@ impl Settings {
             ),
             exclude: resolve_globset(config.exclude, project_root)?,
             extend_exclude: resolve_globset(config.extend_exclude, project_root)?,
-            external: BTreeSet::from_iter(config.external),
+            external: FxHashSet::from_iter(config.external),
             fixable: resolve_codes(&config.fixable, &config.unfixable),
             format: config.format,
             flake8_annotations: config.flake8_annotations,
@@ -105,7 +105,7 @@ impl Settings {
             enabled: FxHashSet::from_iter([check_code.clone()]),
             exclude: GlobSet::empty(),
             extend_exclude: GlobSet::empty(),
-            external: BTreeSet::default(),
+            external: FxHashSet::default(),
             fixable: FxHashSet::from_iter([check_code]),
             format: SerializationFormat::Text,
             ignore_init_module_imports: false,
@@ -133,7 +133,7 @@ impl Settings {
             enabled: FxHashSet::from_iter(check_codes.clone()),
             exclude: GlobSet::empty(),
             extend_exclude: GlobSet::empty(),
-            external: BTreeSet::default(),
+            external: FxHashSet::default(),
             fixable: FxHashSet::from_iter(check_codes),
             format: SerializationFormat::Text,
             ignore_init_module_imports: false,
@@ -162,18 +162,23 @@ impl Hash for Settings {
             confusable.hash(state);
         }
         self.dummy_variable_rgx.as_str().hash(state);
-        for value in &self.enabled {
+        for value in self.enabled.iter().sorted() {
             value.hash(state);
         }
-        self.external.hash(state);
-        for value in &self.fixable {
+        for value in self.external.iter().sorted() {
+            value.hash(state);
+        }
+        for value in self.fixable.iter().sorted() {
             value.hash(state);
         }
         self.ignore_init_module_imports.hash(state);
         self.line_length.hash(state);
         for (absolute, basename, codes) in &self.per_file_ignores {
             absolute.glob().hash(state);
             basename.glob().hash(state);
-            codes.hash(state);
+            for value in codes.iter().sorted() {
+                value.hash(state);
+            }
         }
         self.show_source.hash(state);
         self.target_version.hash(state);
@@ -206,7 +211,7 @@ pub fn resolve_globset(
 pub fn resolve_per_file_ignores(
     per_file_ignores: Vec<PerFileIgnore>,
     project_root: Option<&PathBuf>,
-) -> Result<Vec<(GlobMatcher, GlobMatcher, BTreeSet<CheckCode>)>> {
+) -> Result<Vec<(GlobMatcher, GlobMatcher, FxHashSet<CheckCode>)>> {
     per_file_ignores
         .into_iter()
         .map(|per_file_ignore| {

@@ -96,7 +96,6 @@ pub fn load_options(pyproject: Option<&PathBuf>) -> Result<Options> {
 
 #[cfg(test)]
 mod tests {
-    use std::collections::BTreeMap;
     use std::env::current_dir;
     use std::path::PathBuf;
     use std::str::FromStr;
@@ -432,8 +431,11 @@ other-attribute = 1
                 ban_relative_imports: Some(Strictness::Parents)
             }),
             flake8_import_conventions: Some(flake8_import_conventions::settings::Options {
-                aliases: Some(BTreeMap::from([("pandas".to_string(), "pd".to_string(),)])),
-                extend_aliases: Some(BTreeMap::from([(
+                aliases: Some(FxHashMap::from_iter([(
+                    "pandas".to_string(),
+                    "pd".to_string(),
+                )])),
+                extend_aliases: Some(FxHashMap::from_iter([(
                     "dask.dataframe".to_string(),
                     "dd".to_string(),
                 )])),

@@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::env;
 use std::hash::Hash;
 use std::path::{Path, PathBuf};
@@ -7,6 +6,7 @@ use std::str::FromStr;
 use anyhow::{anyhow, bail, Result};
 use clap::ValueEnum;
 use globset::{Glob, GlobSetBuilder};
+use rustc_hash::FxHashSet;
 use serde::{de, Deserialize, Deserializer, Serialize};
 
 use crate::checks::CheckCode;
@@ -89,10 +89,10 @@ impl FromStr for FilePattern {
     }
 }
 
-#[derive(Debug, Clone, Hash)]
+#[derive(Debug, Clone)]
 pub struct PerFileIgnore {
     pub pattern: String,
-    pub codes: BTreeSet<CheckCode>,
+    pub codes: FxHashSet<CheckCode>,
 }
 
 impl PerFileIgnore {