1491: More clippy r=matklad a=kjeremy

A few more clippy changes.

I'm a little unsure of the second commit. It addresses the trivially_copy_pass_by_ref lint, and there are a number of other places in the code where we could apply it if it makes sense.
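
For anyone who hasn't hit that lint before, here is a rough standalone sketch of what trivially_copy_pass_by_ref complains about; ModuleId and the two functions below are made up for illustration and are not rust-analyzer types.

// Hypothetical example of the pattern clippy::trivially_copy_pass_by_ref flags.
#[derive(Clone, Copy)]
struct ModuleId(u32);

// Flagged: &ModuleId is at least pointer-sized, while ModuleId itself is 4 bytes,
// so taking it by reference gains nothing.
fn index_of_ref(id: &ModuleId) -> u32 {
    id.0
}

// Preferred: take the small Copy value directly.
fn index_of(id: ModuleId) -> u32 {
    id.0
}

fn main() {
    let id = ModuleId(7);
    assert_eq!(index_of_ref(&id), index_of(id));
}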

Co-authored-by: Jeremy Kolb <kjeremy@gmail.com>
bors[bot] 2019-07-05 14:19:12 +00:00
commit ec6f71576a
5 changed files with 33 additions and 35 deletions

View file

@@ -23,27 +23,24 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     // We already have some match arms, so we don't provide any assists.
     // Unless if there is only one trivial match arm possibly created
     // by match postfix complete. Trivial match arm is the catch all arm.
-    match match_expr.match_arm_list() {
-        Some(arm_list) => {
-            let mut arm_iter = arm_list.arms();
-            let first = arm_iter.next();
-
-            match first {
-                // If there arm list is empty or there is only one trivial arm, then proceed.
-                Some(arm) if is_trivial_arm(arm) => {
-                    if arm_iter.next() != None {
-                        return None;
-                    }
-                }
-                None => {}
-
-                _ => {
-                    return None;
-                }
-            }
-        }
-        _ => {}
-    }
+    if let Some(arm_list) = match_expr.match_arm_list() {
+        let mut arm_iter = arm_list.arms();
+        let first = arm_iter.next();
+
+        match first {
+            // If there arm list is empty or there is only one trivial arm, then proceed.
+            Some(arm) if is_trivial_arm(arm) => {
+                if arm_iter.next() != None {
+                    return None;
+                }
+            }
+            None => {}
+
+            _ => {
+                return None;
+            }
+        }
+    };
 
     let expr = match_expr.expr()?;
     let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
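
For readers skimming the diff: this is the shape clippy's single_match-style lints push toward, where a match with one meaningful arm and a do-nothing catch-all becomes an if let. A self-contained sketch of the same rewrite, with made-up names rather than the assist code above:

// Made-up example mirroring the shape of the change above.
fn first_char_upper(s: Option<&str>) -> Option<char> {
    // Before: `match s { Some(text) => { ... } _ => {} }` with a single
    // meaningful arm. After: that arm collapses into an `if let`.
    if let Some(text) = s {
        if let Some(c) = text.chars().next() {
            return Some(c.to_ascii_uppercase());
        }
    }
    None
}

fn main() {
    assert_eq!(first_char_upper(Some("hello")), Some('H'));
    assert_eq!(first_char_upper(None), None);
}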

View file

@@ -131,7 +131,8 @@ impl CrateGraph {
         if self.dfs_find(from, to, &mut FxHashSet::default()) {
             return Err(CyclicDependencies);
         }
-        Ok(self.arena.get_mut(&from).unwrap().add_dep(name, to))
+        self.arena.get_mut(&from).unwrap().add_dep(name, to);
+        Ok(())
     }
 
     pub fn is_empty(&self) -> bool {
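
The change above avoids wrapping a unit value in Ok. A minimal sketch of the same idea with stand-in types (none of these names are the real rust-analyzer API): the inner call returns (), so passing it straight into Ok(...) is what clippy's unit_arg lint flags, and splitting it into a call plus an explicit Ok(()) reads more clearly.

// Stand-in types, not the real CrateGraph API.
struct Graph {
    edges: Vec<(u32, u32)>,
}

#[derive(Debug)]
struct CycleError;

impl Graph {
    fn insert_edge(&mut self, from: u32, to: u32) {
        self.edges.push((from, to));
    }

    fn add_dep(&mut self, from: u32, to: u32) -> Result<(), CycleError> {
        // Before: `Ok(self.insert_edge(from, to))` wraps the unit return value
        // of insert_edge inside Ok, which clippy::unit_arg flags.
        // After: perform the side effect, then return Ok(()) explicitly.
        self.insert_edge(from, to);
        Ok(())
    }
}

fn main() {
    let mut g = Graph { edges: Vec::new() };
    g.add_dep(1, 2).unwrap();
    assert_eq!(g.edges, vec![(1, 2)]);
}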

View file

@@ -240,10 +240,10 @@ impl Module {
     }
 
     pub fn path_to_root(self, db: &impl HirDatabase) -> Vec<Module> {
-        let mut res = vec![self.clone()];
-        let mut curr = self.clone();
+        let mut res = vec![self];
+        let mut curr = self;
         while let Some(next) = curr.parent(db) {
-            res.push(next.clone());
+            res.push(next);
             curr = next
         }
         res
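
The dropped .clone() calls above are the clone_on_copy pattern: once the type is Copy, cloning it is just a more verbose copy. A small self-contained sketch, where Module is a stand-in struct rather than the hir type:

// Minimal illustration; Module here is a stand-in, not the hir type.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Module {
    id: u32,
}

fn path_to_root(start: Module, parent_of: impl Fn(Module) -> Option<Module>) -> Vec<Module> {
    // `start` and `next` are Copy, so they can be stored and reused without
    // .clone(); an explicit clone here is what clippy::clone_on_copy flags.
    let mut res = vec![start];
    let mut curr = start;
    while let Some(next) = parent_of(curr) {
        res.push(next);
        curr = next;
    }
    res
}

fn main() {
    let parent = |m: Module| if m.id == 0 { None } else { Some(Module { id: m.id - 1 }) };
    let path = path_to_root(Module { id: 2 }, parent);
    assert_eq!(path, vec![Module { id: 2 }, Module { id: 1 }, Module { id: 0 }]);
}
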
@@ -299,7 +299,7 @@ impl Module {
             .collect()
     }
 
-    fn with_module_id(&self, module_id: CrateModuleId) -> Module {
+    fn with_module_id(self, module_id: CrateModuleId) -> Module {
         Module { module_id, krate: self.krate }
     }
 }
@@ -463,33 +463,33 @@ pub struct EnumVariant {
 }
 
 impl EnumVariant {
-    pub fn module(&self, db: &impl HirDatabase) -> Module {
+    pub fn module(self, db: &impl HirDatabase) -> Module {
         self.parent.module(db)
     }
 
-    pub fn parent_enum(&self, _db: &impl DefDatabase) -> Enum {
+    pub fn parent_enum(self, _db: &impl DefDatabase) -> Enum {
         self.parent
     }
 
-    pub fn name(&self, db: &impl DefDatabase) -> Option<Name> {
+    pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
         db.enum_data(self.parent).variants[self.id].name.clone()
     }
 
-    pub fn fields(&self, db: &impl HirDatabase) -> Vec<StructField> {
+    pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
         self.variant_data(db)
             .fields()
             .into_iter()
             .flat_map(|it| it.iter())
-            .map(|(id, _)| StructField { parent: (*self).into(), id })
+            .map(|(id, _)| StructField { parent: self.into(), id })
             .collect()
     }
 
-    pub fn field(&self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
+    pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
         self.variant_data(db)
             .fields()
             .into_iter()
             .flat_map(|it| it.iter())
             .find(|(_id, data)| data.name == *name)
-            .map(|(id, _)| StructField { parent: (*self).into(), id })
+            .map(|(id, _)| StructField { parent: self.into(), id })
     }
 }
@@ -517,11 +517,11 @@ impl DefWithBody {
     }
 
     /// Builds a resolver for code inside this item.
-    pub(crate) fn resolver(&self, db: &impl HirDatabase) -> Resolver {
-        match *self {
-            DefWithBody::Const(ref c) => c.resolver(db),
-            DefWithBody::Function(ref f) => f.resolver(db),
-            DefWithBody::Static(ref s) => s.resolver(db),
+    pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
+        match self {
+            DefWithBody::Const(c) => c.resolver(db),
+            DefWithBody::Function(f) => f.resolver(db),
+            DefWithBody::Static(s) => s.resolver(db),
         }
     }
 }
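
One more note on the resolver hunk: with self taken by value and the enum being Copy, the match no longer needs the *self deref or ref bindings. A hypothetical sketch (Id is an invented enum, not DefWithBody):

// Hypothetical Copy enum standing in for DefWithBody; not the hir type.
#[derive(Clone, Copy)]
enum Id {
    Const(u32),
    Function(u32),
    Static(u32),
}

impl Id {
    // With `self` taken by value on a small Copy enum, the match can bind the
    // payload by value too, so neither `*self` nor `ref` patterns are needed.
    fn raw(self) -> u32 {
        match self {
            Id::Const(n) => n,
            Id::Function(n) => n,
            Id::Static(n) => n,
        }
    }
}

fn main() {
    assert_eq!(Id::Function(3).raw(), 3);
}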

View file

@@ -529,7 +529,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         match matching_def {
             Some(_) => {
                 self.write_assoc_resolution(id, item);
-                return matching_def;
+                matching_def
             }
             None => None,
         }
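
This is the usual needless_return cleanup: the match is already the tail expression, so the arm's value is returned without an explicit return. A tiny illustrative sketch with made-up names:

// Illustrative only; `lookup` and its types are made up.
fn lookup(candidates: &[i32], wanted: i32) -> Option<i32> {
    let found = candidates.iter().copied().find(|&c| c == wanted);
    match found {
        Some(value) => {
            // Before this kind of cleanup: `return Some(value);` — redundant,
            // because the match is already the function's tail expression.
            Some(value)
        }
        None => None,
    }
}

fn main() {
    assert_eq!(lookup(&[1, 2, 3], 2), Some(2));
    assert_eq!(lookup(&[1, 2, 3], 9), None);
}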

View file

@@ -262,7 +262,7 @@ where
 
 fn byte_from_char(c: char) -> u8 {
     let res = c as u32;
-    assert!(res <= u8::max_value() as u32, "guaranteed because of Mode::Byte");
+    assert!(res <= u32::from(u8::max_value()), "guaranteed because of Mode::Byte");
     res as u8
 }
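
The last hunk replaces an as cast with u32::from, which is what clippy's cast_lossless lint suggests for widenings that cannot lose bits; From makes the lossless intent explicit, where as would also accept truncating casts. A short sketch of the same comparison, with a made-up helper rather than the real function:

// Small sketch; byte_in_range is a made-up helper, not the rust-analyzer function.
fn byte_in_range(c: char) -> bool {
    let code = c as u32;
    // u8 -> u32 can never lose information, so u32::from states that intent;
    // a plain `as` cast would also compile for conversions that truncate.
    code <= u32::from(u8::max_value())
}

fn main() {
    assert!(byte_in_range('A'));
    assert!(!byte_in_range('€'));
}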