clippy::complexity simplifications related to Iterators

Matthias Krüger 2021-03-21 13:13:34 +01:00
parent bd407a9882
commit ae7e55c1dd
12 changed files with 64 additions and 80 deletions

@@ -213,7 +213,7 @@ impl Crate {
                 Some(TokenTree::Leaf(Leaf::Literal(Literal{ref text, ..}))) => Some(text),
                 _ => None
             }
-        }).flat_map(|t| t).next();
+        }).flatten().next();
         doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
     }
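
The left-hand side maps every `doc` attribute down to an `Option` and then calls `.flat_map(|t| t)`; clippy's `flat_map_identity` lint points out that `flatten()` says the same thing directly. A minimal sketch of the equivalence with toy data (the `tokens` vector is illustrative, not rust-analyzer's types):

    fn main() {
        // An iterator of Options, like the mapped token stream above.
        let tokens = vec![Some("https://docs.rs"), None, Some("ignored")];

        // Before: flat_map with an identity closure.
        let first_old = tokens.iter().cloned().flat_map(|t| t).next();
        // After: flatten() unwraps the Some values in exactly the same way.
        let first_new = tokens.iter().cloned().flatten().next();

        assert_eq!(first_old, first_new);
        assert_eq!(first_new, Some("https://docs.rs"));
    }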

@@ -638,7 +638,7 @@ fn collect_attrs(
     owner: &dyn ast::AttrsOwner,
 ) -> impl Iterator<Item = Either<ast::Attr, ast::Comment>> {
     let (inner_attrs, inner_docs) = inner_attributes(owner.syntax())
-        .map_or((None, None), |(attrs, docs)| ((Some(attrs), Some(docs))));
+        .map_or((None, None), |(attrs, docs)| (Some(attrs), Some(docs)));
     let outer_attrs = owner.attrs().filter(|attr| attr.excl_token().is_none());
     let attrs = outer_attrs
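
The only change here is a redundant pair of parentheses around the returned tuple; presumably rustc's `unused_parens` warning or clippy's `double_parens` flags the shape. The same cleanup reappears in the `compute_method_ranks` hunk further down. For reference:

    fn main() {
        // Before: a tuple wrapped in an extra set of parentheses.
        #[allow(unused_parens)] // silence the "before" form for the demo
        let verbose = ((Some(1), Some(2)));
        // After: same value, no noise.
        let plain = (Some(1), Some(2));
        assert_eq!(verbose, plain);
    }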

@@ -38,7 +38,7 @@ impl<'a> InferenceContext<'a> {
         let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
         let (pre, post) = match ellipsis {
             Some(idx) => subpats.split_at(idx),
-            None => (&subpats[..], &[][..]),
+            None => (subpats, &[][..]),
         };
         let post_idx_offset = field_tys.iter().count() - post.len();
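
`subpats` is already a slice at this point, so `&subpats[..]` merely reborrows it; this is clippy's `redundant_slicing` lint. A sketch under that assumption (the `split` helper is hypothetical):

    // When the input is already &[T], re-slicing with &x[..] is a no-op.
    fn split(subpats: &[u32], ellipsis: Option<usize>) -> (&[u32], &[u32]) {
        match ellipsis {
            Some(idx) => subpats.split_at(idx),
            // Formerly written as (&subpats[..], &[][..]).
            None => (subpats, &[][..]),
        }
    }

    fn main() {
        assert_eq!(split(&[1, 2, 3], Some(1)), (&[1][..], &[2, 3][..]));
        assert_eq!(split(&[1, 2, 3], None), (&[1, 2, 3][..], &[][..]));
    }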

@@ -263,11 +263,10 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
 ) -> Option<SyntaxToken> {
     node.siblings_with_tokens(dir)
         .skip(1)
-        .skip_while(|node| match node {
-            NodeOrToken::Node(_) => false,
-            NodeOrToken::Token(it) => is_single_line_ws(it),
+        .find(|node| match node {
+            NodeOrToken::Node(_) => true,
+            NodeOrToken::Token(it) => !is_single_line_ws(it),
         })
-        .next()
         .and_then(|it| it.into_token())
         .filter(|node| node.kind() == delimiter_kind)
 }
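
`skip_while(p).next()` is clippy's `skip_while_next` lint: it reads as "skip everything matching p, then take one", which is just `find` with the predicate negated. That negation is why the match arms flip from `false`/`is_single_line_ws(it)` to `true`/`!is_single_line_ws(it)`. A toy version; the same rewrite shows up once more in the `ast::MatchArmList` hunk at the end:

    fn main() {
        let text = "   hello";

        // Before: skip leading whitespace, then take the next char.
        let old_way = text.chars().skip_while(|c| c.is_whitespace()).next();
        // After: find the first char that is NOT whitespace.
        let new_way = text.chars().find(|c| !c.is_whitespace());

        assert_eq!(old_way, new_way);
        assert_eq!(new_way, Some('h'));
    }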

@@ -1,5 +1,4 @@
 use itertools::Itertools;
-use std::convert::identity;
 use syntax::{
     ast::{
         self,
@@ -140,7 +139,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
         .filter(|s| !skippable(s))
         .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
         .take_while(|opt_com| opt_com.is_some())
-        .filter_map(identity)
+        .flatten()
         .skip(1); // skip the first element so we don't duplicate it in next_comments

     let next_comments = comment
@@ -149,7 +148,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
         .filter(|s| !skippable(s))
         .map(|not| not.into_token().and_then(Comment::cast).filter(same_prefix))
         .take_while(|opt_com| opt_com.is_some())
-        .filter_map(identity);
+        .flatten();
     let mut comments: Vec<_> = prev_comments.collect();
     comments.reverse();
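
Both hunks replace `filter_map(identity)` with `flatten()`, which in turn lets the `use std::convert::identity;` import go away; current clippy calls this `filter_map_identity`. Over an iterator of Options the two are interchangeable:

    use std::convert::identity;

    fn main() {
        let comments = vec![Some(1), Some(2), None, Some(3)];

        // Before: keep and unwrap the Some values via an identity closure.
        let old_way: Vec<i32> = comments.clone().into_iter().filter_map(identity).collect();
        // After: flatten() does the same without the import.
        let new_way: Vec<i32> = comments.into_iter().flatten().collect();

        assert_eq!(old_way, new_way);
        assert_eq!(new_way, vec![1, 2, 3]);
    }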

@@ -136,18 +136,13 @@ impl Refs {
             .into_iter()
             .filter(|r| {
                 if let Def::ModuleDef(ModuleDef::Trait(tr)) = r.def {
-                    if tr
-                        .items(ctx.db())
-                        .into_iter()
-                        .find(|ai| {
-                            if let AssocItem::Function(f) = *ai {
-                                Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx)
-                            } else {
-                                false
-                            }
-                        })
-                        .is_some()
-                    {
+                    if tr.items(ctx.db()).into_iter().any(|ai| {
+                        if let AssocItem::Function(f) = ai {
+                            Def::ModuleDef(ModuleDef::Function(f)).is_referenced_in(ctx)
+                        } else {
+                            false
+                        }
+                    }) {
                         return true;
                     }
                 }
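
`find(...).is_some()` collapses to `any(...)`; this is clippy's `search_is_some` lint. One detail visible in the hunk: `find` hands its closure a reference (hence the old `*ai`), while `any` passes the item by value, so the dereference disappears too. The same rewrite appears twice more below, in `has_bind_pat_parent` and in a macro-expansion test. Toy version:

    fn main() {
        let names = ["foo", "bar", "baz"];

        // Before: search, then only ask whether anything was found.
        let old_way = names.iter().find(|name| name.starts_with('b')).is_some();
        // After: any() asks the yes/no question directly.
        let new_way = names.iter().any(|name| name.starts_with('b'));

        assert!(old_way && new_way);
    }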

@@ -95,7 +95,7 @@ fn compute_method_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashM
             _ => None,
         })
         .enumerate()
-        .map(|(idx, func)| ((func.name(ctx.db()).to_string(), idx)))
+        .map(|(idx, func)| (func.name(ctx.db()).to_string(), idx))
         .collect(),
     )
 }

@@ -71,7 +71,7 @@ fn test_has_block_expr_parent() {
 }

 pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool {
-    element.ancestors().find(|it| it.kind() == IDENT_PAT).is_some()
+    element.ancestors().any(|it| it.kind() == IDENT_PAT)
 }

 #[test]
 fn test_has_bind_pat_parent() {

@@ -67,7 +67,7 @@ impl ParsedRule {
     ) -> Result<Vec<ParsedRule>, SsrError> {
         let raw_pattern = pattern.as_rust_code();
         let raw_template = template.map(|t| t.as_rust_code());
-        let raw_template = raw_template.as_ref().map(|s| s.as_str());
+        let raw_template = raw_template.as_deref();
         let mut builder = RuleBuilder {
             placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
             rules: Vec::new(),
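
Turning an `Option<String>` into an `Option<&str>` is exactly what `as_deref()` is for; clippy's `option_as_ref_deref` lint suggests it over the spelled-out `as_ref().map(|s| s.as_str())`. Sketch:

    fn main() {
        let template: Option<String> = Some("replace($a)".to_string());

        // Before: borrow, then map String -> &str by hand.
        let old_way: Option<&str> = template.as_ref().map(|s| s.as_str());
        // After: as_deref() borrows and derefs in one step.
        let new_way: Option<&str> = template.as_deref();

        assert_eq!(old_way, new_way);
    }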

@@ -1225,8 +1225,7 @@ macro_rules! m {
         )
         .expand_statements(r#"m!(C("0"))"#)
         .descendants()
-        .find(|token| token.kind() == ERROR)
-        .is_some());
+        .any(|token| token.kind() == ERROR));
 }

 #[test]

@@ -137,15 +137,10 @@ fn collect_from_workspace(
     let stdout = BufReader::new(child_stdout);

     let mut res = BuildDataMap::default();
-    for message in cargo_metadata::Message::parse_stream(stdout) {
-        if let Ok(message) = message {
+    for message in cargo_metadata::Message::parse_stream(stdout).flatten() {
         match message {
             Message::BuildScriptExecuted(BuildScript {
-                package_id,
-                out_dir,
-                cfgs,
-                env,
-                ..
+                package_id, out_dir, cfgs, env, ..
             }) => {
                 let cfgs = {
                     let mut acc = Vec::new();
@@ -176,8 +171,7 @@ fn collect_from_workspace(
                 if message.target.kind.contains(&"proc-macro".to_string()) {
                     let package_id = message.package_id;
                     // Skip rmeta file
-                    if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
-                    {
+                    if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name)) {
                         let filename = AbsPathBuf::assert(PathBuf::from(&filename));
                         let res = res.entry(package_id.repr.clone()).or_default();
                         res.proc_macro_dylib_path = Some(filename);
@@ -192,7 +186,6 @@ fn collect_from_workspace(
             _ => {}
         }
     }
-    }

     for package in packages {
         let build_data = res.entry(package.id.repr.clone()).or_default();
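
The interesting change in this file is clippy's `manual_flatten`: `cargo_metadata::Message::parse_stream` yields `Result`s, and looping over them with a nested `if let Ok(...)` is the same as looping over the `.flatten()`ed stream, since `Result` iterates over its `Ok` value; either form silently drops the `Err` cases. (The struct-pattern collapse onto one line and the brace placement fixes are pure formatting.) A toy version with a plain `Vec<Result<_, _>>`:

    fn main() {
        let messages: Vec<Result<&str, &str>> =
            vec![Ok("compiler-artifact"), Err("bad json"), Ok("build-script-executed")];

        // Before:
        // for message in messages.into_iter() {
        //     if let Ok(message) = message {
        //         println!("{}", message);
        //     }
        // }

        // After: Err values are skipped by flatten().
        for message in messages.into_iter().flatten() {
            println!("{}", message);
        }
    }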

@@ -461,8 +461,7 @@ impl ast::MatchArmList {
         let end = if let Some(comma) = start
             .siblings_with_tokens(Direction::Next)
             .skip(1)
-            .skip_while(|it| it.kind().is_trivia())
-            .next()
+            .find(|it| !it.kind().is_trivia())
             .filter(|it| it.kind() == T![,])
         {
             comma