Enable nursery rules: 'redundant_clone', 'debug_assert_with_mut_call', and 'unused_peekable' (#13920)

Micha Reiser 2024-10-25 09:46:30 +02:00 committed by GitHub
parent 337af836d3
commit 32b57b2ee4
15 changed files with 28 additions and 28 deletions

View file

@@ -202,6 +202,10 @@ get_unwrap = "warn"
 rc_buffer = "warn"
 rc_mutex = "warn"
 rest_pat_in_fully_bound_structs = "warn"
+# nursery rules
+redundant_clone = "warn"
+debug_assert_with_mut_call = "warn"
+unused_peekable = "warn"

 [profile.release]
 # Note that we set these explicitly, and these values
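Most of the changes in this commit are fixes for `redundant_clone`, which fires when a value is cloned (or re-allocated via `to_string`/`to_owned`) even though the original is never used again, so the value could simply be moved. A minimal, self-contained sketch of the pattern; the `ModuleName` type is hypothetical, not code from this repository:

```rust
// Stand-in type; the real code passes owned values such as module names or URIs.
struct ModuleName(String);

impl ModuleName {
    fn new(name: String) -> Self {
        Self(name)
    }
}

fn main() {
    let name = String::from("functools");

    // Before: clippy::redundant_clone warns because `name` is never used
    // after this call, so the clone is an unnecessary allocation.
    // let module = ModuleName::new(name.clone());

    // After: move the value instead of cloning it.
    let module = ModuleName::new(name);
    println!("{}", module.0);
}
```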

View file

@@ -144,7 +144,7 @@ pub fn main() -> ExitStatus {
 }

 fn run() -> anyhow::Result<ExitStatus> {
-    let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
+    let args = Args::parse_from(std::env::args());

     if matches!(args.command, Some(Command::Server)) {
         return run_server().map(|()| ExitStatus::Success);

View file

@@ -132,7 +132,7 @@ mod tests {
     #[test]
     fn inequality() {
         let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
-        let parsed = ParsedModule::new(parsed_raw.clone());
+        let parsed = ParsedModule::new(parsed_raw);

         let stmt = &parsed.syntax().body[0];
         let node = unsafe { AstNodeRef::new(parsed.clone(), stmt) };
@@ -150,7 +150,7 @@ mod tests {
     #[allow(unsafe_code)]
     fn debug() {
         let parsed_raw = parse_unchecked_source("1 + 2", PySourceType::Python);
-        let parsed = ParsedModule::new(parsed_raw.clone());
+        let parsed = ParsedModule::new(parsed_raw);

         let stmt = &parsed.syntax().body[0];

View file

@@ -1294,7 +1294,7 @@ mod tests {
             search_paths: SearchPathSettings {
                 extra_paths: vec![],
                 src_root: src.clone(),
-                custom_typeshed: Some(custom_typeshed.clone()),
+                custom_typeshed: Some(custom_typeshed),
                 site_packages: SitePackages::Known(vec![site_packages]),
             },
         },
@@ -1445,7 +1445,7 @@ mod tests {
     assert_function_query_was_not_run(
         &db,
         resolve_module_query,
-        ModuleNameIngredient::new(&db, functools_module_name.clone()),
+        ModuleNameIngredient::new(&db, functools_module_name),
         &events,
     );
     assert_eq!(functools_module.search_path(), &stdlib);

View file

@@ -296,7 +296,7 @@ impl DefinitionNodeRef<'_> {
                 handler,
                 is_star,
             }) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
-                handler: AstNodeRef::new(parsed.clone(), handler),
+                handler: AstNodeRef::new(parsed, handler),
                 is_star,
             }),
         }

View file

@@ -18,6 +18,7 @@ impl<I, T: DoubleEndedIterator<Item = I>> PythonSubscript for T {
 }

 #[cfg(test)]
+#[allow(clippy::redundant_clone)]
 mod tests {
     use super::PythonSubscript;

View file

@@ -37,7 +37,7 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
             params.cell_text_documents,
         )
         .with_failure_code(ErrorCode::InternalError)?;
-        session.open_notebook_document(params.notebook_document.uri.clone(), notebook);
+        session.open_notebook_document(params.notebook_document.uri, notebook);

         match path {
             AnySystemPath::System(path) => {

View file

@@ -110,14 +110,14 @@ impl Workspace {
     pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
         let result = self.db.check_file(file_id.file).map_err(into_error)?;

-        Ok(result.clone())
+        Ok(result)
     }

     /// Checks all open files
     pub fn check(&self) -> Result<Vec<String>, Error> {
         let result = self.db.check().map_err(into_error)?;

-        Ok(result.clone())
+        Ok(result)
     }

     /// Returns the parsed AST for `path`

View file

@@ -88,7 +88,7 @@ where
     let line_end = locator.full_line_end(script_start.end());
     let rest = locator.after(line_end);
     let mut end_offset = None;
-    let mut lines = UniversalNewlineIterator::with_offset(rest, line_end).peekable();
+    let mut lines = UniversalNewlineIterator::with_offset(rest, line_end);
     while let Some(line) = lines.next() {
         let Some(content) = script_line_content(&line) else {

View file

@@ -1850,7 +1850,7 @@ static GOOGLE_ARGS_REGEX: LazyLock<Regex> =
     LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());

 fn args_section(context: &SectionContext) -> FxHashSet<String> {
-    let mut following_lines = context.following_lines().peekable();
+    let mut following_lines = context.following_lines();
     let Some(first_line) = following_lines.next() else {
         return FxHashSet::default();
     };
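The two `.peekable()` removals above address `unused_peekable`, which warns when an iterator is wrapped in `Peekable` but `peek()` is never called, so the wrapper only adds overhead. A rough stand-alone illustration of the same pattern, using plain `str::lines` rather than the real `UniversalNewlineIterator` or `following_lines` code:

```rust
fn first_nonempty(text: &str) -> Option<&str> {
    // Before: `.peekable()` here would trigger clippy::unused_peekable,
    // because the loop below only ever calls `next()`, never `peek()`.
    // let mut lines = text.lines().peekable();
    let mut lines = text.lines();

    while let Some(line) = lines.next() {
        if !line.trim().is_empty() {
            return Some(line);
        }
    }
    None
}

fn main() {
    assert_eq!(first_nonempty("\n\nhello\nworld"), Some("hello"));
}
```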

View file

@@ -122,10 +122,7 @@ impl TestRule for StableTestRuleSafeFix {
         } else {
             Some(
                 Diagnostic::new(StableTestRuleSafeFix, ruff_text_size::TextRange::default())
-                    .with_fix(Fix::safe_edit(Edit::insertion(
-                        comment.to_string(),
-                        TextSize::new(0),
-                    ))),
+                    .with_fix(Fix::safe_edit(Edit::insertion(comment, TextSize::new(0)))),
             )
         }
     }
@@ -169,10 +166,7 @@ impl TestRule for StableTestRuleUnsafeFix {
                     StableTestRuleUnsafeFix,
                     ruff_text_size::TextRange::default(),
                 )
-                .with_fix(Fix::unsafe_edit(Edit::insertion(
-                    comment.to_string(),
-                    TextSize::new(0),
-                ))),
+                .with_fix(Fix::unsafe_edit(Edit::insertion(comment, TextSize::new(0)))),
             )
         }
     }
@@ -217,7 +211,7 @@ impl TestRule for StableTestRuleDisplayOnlyFix {
                     ruff_text_size::TextRange::default(),
                 )
                 .with_fix(Fix::display_only_edit(Edit::insertion(
-                    comment.to_string(),
+                    comment,
                     TextSize::new(0),
                 ))),
             )

View file

@@ -60,15 +60,13 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<TokenStream> {
         }
     }

-    let docs: Vec<&Attribute> = struct_attributes
+    let docs = struct_attributes
         .iter()
-        .filter(|attr| attr.path().is_ident("doc"))
-        .collect();
+        .filter(|attr| attr.path().is_ident("doc"));

     // Convert the list of `doc` attributes into a single string.
     let doc = dedent(
         &docs
-            .into_iter()
             .map(parse_doc)
             .collect::<syn::Result<Vec<_>>>()?
             .join("\n"),

View file

@@ -30,7 +30,10 @@ pub fn find_only_token_in_range(
     let token = tokens.next().expect("Expected a token");
     debug_assert_eq!(token.kind(), token_kind);

     let mut tokens = tokens.skip_while(|token| token.kind == SimpleTokenKind::LParen);
+    #[allow(clippy::debug_assert_with_mut_call)]
+    {
         debug_assert_eq!(tokens.next(), None);
+    }

     token
 }
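Here the lint is suppressed rather than fixed: `debug_assert_with_mut_call` flags `debug_assert!`-family macros whose arguments call a method taking `&mut self` (here `tokens.next()`), because the call, and any side effect it has, disappears in release builds where the assertion is compiled out. A hedged stand-alone illustration of what the lint warns about, not the real `find_only_token_in_range` code:

```rust
fn main() {
    let mut numbers = [1, 2, 3].into_iter().skip_while(|&n| n < 3);

    let last = numbers.next();
    assert_eq!(last, Some(3));

    // clippy::debug_assert_with_mut_call warns on the next line: `numbers.next()`
    // mutates the iterator, so the advance only happens in debug builds.
    // Wrapping the assertion in an #[allow]-ed block, as the diff above does,
    // acknowledges that the side effect is intentional and debug-only.
    debug_assert_eq!(numbers.next(), None);
}
```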

View file

@@ -298,7 +298,7 @@ fn super_resolution_overview() {
             .unwrap();
     }

-    let snapshot = session.take_snapshot(file_url.clone()).unwrap();
+    let snapshot = session.take_snapshot(file_url).unwrap();

     insta::assert_snapshot!(
         "changed_notebook",

View file

@@ -2029,11 +2029,11 @@ mod tests {
     assert_override(
         vec![
             RuleSelection {
-                select: Some(vec![d417.clone()]),
+                select: Some(vec![d417]),
                 ..RuleSelection::default()
             },
             RuleSelection {
-                extend_select: vec![d41.clone()],
+                extend_select: vec![d41],
                 ..RuleSelection::default()
             },
         ],