update with new clippy lints

Folkert 2022-03-31 19:34:51 +02:00
parent 51b360b578
commit 0dd2cec09a
No known key found for this signature in database
GPG key ID: 1F17F6FFD112B97C
22 changed files with 53 additions and 77 deletions

View file

@@ -243,7 +243,7 @@ fn add_header_mn_list(
str_vec
.iter()
.enumerate()
- .map(|(indx, provide_str)| {
+ .flat_map(|(indx, provide_str)| {
let provide_str = header_val_mn(
provide_str.to_owned(),
ast_node_id,
@@ -266,7 +266,6 @@ fn add_header_mn_list(
vec![provide_str]
}
})
- .flatten()
.collect()
}

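This change, and many of the hunks below, appear to address clippy's `map_flatten` lint, which flags `.map(..).flatten()` chains and suggests `.flat_map(..)`. A minimal sketch of the pattern, with illustrative data rather than code from this repository:

    fn main() {
        let groups = vec![vec![1, 2], vec![3, 4]];

        // Lint target: map each group to an iterator, then flatten the result.
        let a: Vec<i32> = groups.iter().map(|g| g.iter().copied()).flatten().collect();

        // Suggested form: flat_map performs both steps in a single adapter.
        let b: Vec<i32> = groups.iter().flat_map(|g| g.iter().copied()).collect();

        assert_eq!(a, b);
    }
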
View file

@@ -395,7 +395,7 @@ fn tree_as_string_helper(
.to_owned();
let child = mark_node_pool.get(child_id);
- let child_str = format!("{}", mark_node_pool.get(child_id)).replace("\n", "\\n");
+ let child_str = format!("{}", mark_node_pool.get(child_id)).replace('\n', "\\n");
full_str.push_str(&format!("{} mn_id {}\n", child_str, child_id));

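The `replace("\n", ..)` changes here and in several later files look like clippy's `single_char_pattern` lint, which prefers a `char` pattern over a one-character string. A small sketch with an illustrative input:

    fn main() {
        let s = "a\nb";

        // Lint target: a one-character string literal used as the pattern.
        let with_str = s.replace("\n", "\\n");

        // Suggested form: a char pattern does the same thing with less machinery.
        let with_char = s.replace('\n', "\\n");

        assert_eq!(with_str, with_char);
        assert_eq!(with_char, "a\\nb");
    }
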
View file

@@ -947,7 +947,7 @@ fn link_macos(
Err(_) => "".to_string(),
};
for roc_link_flag in roc_link_flags.split_whitespace() {
- ld_command.arg(roc_link_flag.to_string());
+ ld_command.arg(roc_link_flag);
}
ld_command.args(&[

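Dropping the `.to_string()` before `Command::arg` avoids an extra allocation: `arg` accepts any `AsRef<OsStr>`, so the `&str` can be passed directly (clippy reports this kind of conversion, e.g. via `unnecessary_to_owned`; the exact lint name here is an assumption). A sketch with made-up values:

    use std::process::Command;

    fn main() {
        let mut ld_command = Command::new("ld");
        let roc_link_flag = "-lSystem";

        // Lint target: allocating a String only to hand it to arg().
        ld_command.arg(roc_link_flag.to_string());

        // Suggested form: arg() accepts the &str as-is.
        ld_command.arg(roc_link_flag);
    }
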
View file

@@ -166,8 +166,7 @@ fn sort_type_defs_before_introduction(
Ok(result) => result
.iter()
.rev()
- .map(|group_index| sccs[*group_index].iter())
- .flatten()
+ .flat_map(|group_index| sccs[*group_index].iter())
.copied()
.collect(),

View file

@@ -281,8 +281,7 @@ pub fn canonicalize_module_defs<'a>(
let transitive_builtins: Vec<Symbol> = referenced_values
.iter()
.filter(|s| s.is_builtin())
- .map(|s| crate::builtins::builtin_dependencies(*s))
- .flatten()
+ .flat_map(|s| crate::builtins::builtin_dependencies(*s))
.copied()
.collect();

View file

@@ -110,7 +110,7 @@ pub enum ParsedNumResult {
pub fn finish_parsing_num(raw: &str) -> Result<ParsedNumResult, (&str, IntErrorKind)> {
// Ignore underscores.
let radix = 10;
- from_str_radix(raw.replace("_", "").as_str(), radix).map_err(|e| (raw, e))
+ from_str_radix(raw.replace('_', "").as_str(), radix).map_err(|e| (raw, e))
}
#[inline(always)]
@@ -128,9 +128,9 @@ pub fn finish_parsing_base(
// Ignore underscores, insert - when negative to get correct underflow/overflow behavior
(if is_negative {
- from_str_radix(format!("-{}", raw.replace("_", "")).as_str(), radix)
+ from_str_radix(format!("-{}", raw.replace('_', "")).as_str(), radix)
} else {
- from_str_radix(raw.replace("_", "").as_str(), radix)
+ from_str_radix(raw.replace('_', "").as_str(), radix)
})
.and_then(|parsed| match parsed {
ParsedNumResult::Float(..) => Err(IntErrorKind::FloatSuffix),
@@ -154,7 +154,7 @@ pub fn finish_parsing_float(raw: &str) -> Result<(f64, FloatBound), (&str, Float
};
// Ignore underscores.
- match raw_without_suffix.replace("_", "").parse::<f64>() {
+ match raw_without_suffix.replace('_', "").parse::<f64>() {
Ok(float) if float.is_finite() => Ok((float, bound)),
Ok(float) => {
if float.is_sign_positive() {

View file

@@ -185,8 +185,7 @@ fn is_exhaustive(matrix: &RefPatternMatrix, n: usize) -> PatternMatrix {
alt_list
.iter()
.cloned()
- .map(is_alt_exhaustive)
- .flatten()
+ .flat_map(is_alt_exhaustive)
.collect()
}
}

View file

@@ -884,10 +884,8 @@ pub fn build_exp_literal<'a, 'ctx, 'env>(
let struct_type = str_type;
- let mut struct_val;
// Store the pointer
- struct_val = builder
+ let mut struct_val = builder
.build_insert_value(
struct_type.get_undef(),
ptr,
@@ -2113,8 +2111,7 @@ fn reserve_with_refcount_union_as_block_of_memory<'a, 'ctx, 'env>(
let alignment_bytes = fields
.iter()
- .map(|tag| tag.iter().map(|l| l.alignment_bytes(env.target_info)))
- .flatten()
+ .flat_map(|tag| tag.iter().map(|l| l.alignment_bytes(env.target_info)))
.max()
.unwrap_or(0);

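The `struct_val` change here (and the identical one in the next file) collapses a declare-then-assign pair into a single `let`, which is what clippy's `needless_late_init` lint asks for (lint name assumed from the shape of the change). A simplified sketch:

    fn main() {
        // Lint target: the binding is declared first and only assigned afterwards.
        let mut count;
        count = 40;
        count += 2;

        // Suggested form: initialize at the declaration site.
        let mut count2 = 40;
        count2 += 2;

        assert_eq!(count, count2);
    }
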
View file

@@ -1285,10 +1285,8 @@ pub fn store_list<'a, 'ctx, 'env>(
let struct_type = super::convert::zig_list_type(env);
- let mut struct_val;
// Store the pointer
- struct_val = builder
+ let mut struct_val = builder
.build_insert_value(
struct_type.get_undef(),
pass_as_opaque(env, pointer_to_first_element),

View file

@@ -121,8 +121,7 @@ fn start_phase<'a>(
Recurse(new) => {
return new
.into_iter()
- .map(|(module_id, phase)| start_phase(module_id, phase, arena, state))
- .flatten()
+ .flat_map(|(module_id, phase)| start_phase(module_id, phase, arena, state))
.collect()
}
}

View file

@@ -5971,7 +5971,7 @@ fn substitute_in_stmt_help<'a>(
if opt_remainder.is_some() || opt_continuation.is_some() {
let remainder = opt_remainder.unwrap_or(remainder);
- let continuation = opt_continuation.unwrap_or_else(|| *continuation);
+ let continuation = opt_continuation.unwrap_or(*continuation);
Some(arena.alloc(Join {
id: *id,

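Replacing `unwrap_or_else(|| *continuation)` with `unwrap_or(*continuation)` matches clippy's `unnecessary_lazy_evaluations` lint: when the fallback is an already-available value rather than real work, the closure adds nothing. A small sketch:

    fn main() {
        let opt_continuation: Option<i32> = None;
        let continuation = 7;

        // Lint target: the closure merely returns a value that already exists.
        let a = opt_continuation.unwrap_or_else(|| continuation);

        // Suggested form: pass the value directly; nothing costly is deferred.
        let b = opt_continuation.unwrap_or(continuation);

        assert_eq!(a, b);
    }
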
View file

@@ -1180,8 +1180,7 @@ impl<'a> Layout<'a> {
match variant {
NonRecursive(fields) => fields
.iter()
- .map(|ls| ls.iter())
- .flatten()
+ .flat_map(|ls| ls.iter())
.any(|f| f.contains_refcounted()),
Recursive(_)
| NullableWrapped { .. }

View file

@@ -117,7 +117,7 @@ fn insert_jumps<'a>(
if opt_remainder.is_some() || opt_continuation.is_some() {
let remainder = opt_remainder.unwrap_or(remainder);
- let continuation = opt_continuation.unwrap_or_else(|| *continuation);
+ let continuation = opt_continuation.unwrap_or(*continuation);
Some(arena.alloc(Join {
id: *id,

View file

@@ -966,8 +966,7 @@ impl Type {
Self::contains_symbol_ext(ext, rep_symbol)
|| tags
.iter()
- .map(|v| v.1.iter())
- .flatten()
+ .flat_map(|v| v.1.iter())
.any(|arg| arg.contains_symbol(rep_symbol))
}
@@ -1026,8 +1025,7 @@ impl Type {
Self::contains_variable_ext(ext, rep_variable)
|| tags
.iter()
- .map(|v| v.1.iter())
- .flatten()
+ .flat_map(|v| v.1.iter())
.any(|arg| arg.contains_variable(rep_variable))
}
@@ -1235,7 +1233,7 @@ impl Type {
substitution.clear();
substitution.insert(rec_var, Type::Variable(new_rec_var));
- for typ in tags.iter_mut().map(|v| v.1.iter_mut()).flatten() {
+ for typ in tags.iter_mut().flat_map(|v| v.1.iter_mut()) {
typ.substitute(&substitution);
}
@@ -1353,7 +1351,7 @@ fn symbols_help(initial: &Type) -> Vec<Symbol> {
}
RecursiveTagUnion(_, tags, ext) | TagUnion(tags, ext) => {
stack.extend(ext);
- stack.extend(tags.iter().map(|v| v.1.iter()).flatten());
+ stack.extend(tags.iter().flat_map(|v| v.1.iter()));
}
Record(fields, ext) => {
@@ -2377,8 +2375,7 @@ pub fn gather_fields_unsorted_iter(
let it = stack
.into_iter()
- .map(|fields| fields.iter_all())
- .flatten()
+ .flat_map(|fields| fields.iter_all())
.map(move |(i1, i2, i3)| {
let field_name: &Lowercase = &subs[i1];
let variable = subs[i2];
@@ -2462,8 +2459,7 @@ pub fn gather_tags_unsorted_iter(
let it = stack
.into_iter()
- .map(|union_tags| union_tags.iter_all())
- .flatten()
+ .flat_map(|union_tags| union_tags.iter_all())
.map(move |(i1, i2)| {
let tag_name: &TagName = &subs[i1];
let subs_slice = subs[i2];

View file

@@ -262,11 +262,7 @@ fn check_valid_range(
range: VariableSubsSlice,
mode: Mode,
) -> Outcome {
- let slice = subs
- .get_subs_slice(range)
- .iter()
- .copied()
- .collect::<Vec<_>>();
+ let slice = subs.get_subs_slice(range).to_vec();
let mut it = slice.iter().peekable();
while let Some(&possible_var) = it.next() {
@@ -1432,7 +1428,7 @@ fn unify_recursion(
} => {
// NOTE: structure and other_structure may not be unified yet, but will be
// we should not do that here, it would create an infinite loop!
- let name = (*opt_name).or_else(|| *other_opt_name);
+ let name = (*opt_name).or(*other_opt_name);
merge(
subs,
ctx,

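The `check_valid_range` change replaces an iterate-copy-collect chain over a slice with `to_vec()`; clippy flags this shape (the `iter_cloned_collect` family of lints, exact lint name assumed). The `or_else` change beneath it is the same lazy-evaluation cleanup as above. A sketch of the slice case:

    fn main() {
        let slice: &[u32] = &[1, 2, 3];

        // Lint target: copying a slice element by element into a Vec.
        let a: Vec<u32> = slice.iter().copied().collect();

        // Suggested form: slices can copy themselves into a Vec directly.
        let b = slice.to_vec();

        assert_eq!(a, b);
    }
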
View file

@@ -83,7 +83,7 @@ pub fn generate_docs_html(filenames: Vec<PathBuf>, build_dir: &Path) {
// Write each package's module docs html file
for loaded_module in package.modules.iter_mut() {
for module_docs in loaded_module.documentation.values() {
- let module_dir = build_dir.join(module_docs.name.replace(".", "/").as_str());
+ let module_dir = build_dir.join(module_docs.name.replace('.', "/").as_str());
fs::create_dir_all(&module_dir)
.expect("TODO gracefully handle not being able to create the module dir");

View file

@@ -70,7 +70,7 @@ impl fmt::Display for CodeLines {
.collect::<Vec<String>>()
.join(" ");
- let escaped_row_str = row_str.replace("\n", "\\n");
+ let escaped_row_str = row_str.replace('\n', "\\n");
write!(f, "\n{}", escaped_row_str)?;
}

View file

@@ -478,22 +478,18 @@ fn read_main_roc_file(project_dir_path_opt: Option<&Path>) -> (PathStr, String)
.unwrap_or_else(|err| panic!("Failed to list items in project directory: {:?}", err))
.items;
- let file_names = dir_items
- .iter()
- .map(|info_hash_map| {
+ let file_names = dir_items.iter().flat_map(|info_hash_map| {
info_hash_map
.values()
- .map(|dir_entry_value| {
+ .filter_map(|dir_entry_value| {
if let DirEntryValue::String(file_name) = dir_entry_value {
Some(file_name)
} else {
None
}
})
- .flatten() // remove None
.collect::<Vec<&String>>()
- })
- .flatten();
+ });
let roc_file_names: Vec<&String> = file_names
.filter(|file_name| file_name.contains(".roc"))

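When the mapped closure yields an `Option`, the `map(..).flatten()` cleanup becomes `filter_map` instead of `flat_map`, as in this hunk and in the relocation-processing hunk below. A minimal sketch with invented file names:

    fn main() {
        let names = ["main.roc", "notes.txt", "app.roc"];

        // Lint target: map to Option, then flatten away the Nones.
        let a: Vec<&str> = names
            .iter()
            .map(|n| if n.ends_with(".roc") { Some(*n) } else { None })
            .flatten()
            .collect();

        // Suggested form: filter_map keeps and unwraps the Somes in one step.
        let b: Vec<&str> = names
            .iter()
            .filter_map(|n| if n.ends_with(".roc") { Some(*n) } else { None })
            .collect();

        assert_eq!(a, b);
    }
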
View file

@@ -137,7 +137,7 @@ fn markup_to_wgpu_helper<'a>(
} => {
let highlight_color = map_get(&code_style.ed_theme.syntax_high_map, syn_high_style)?;
- let full_content = markup_node.get_full_content().replace("\n", "\\n"); // any \n left here should be escaped so that it can be shown as \n
+ let full_content = markup_node.get_full_content().replace('\n', "\\n"); // any \n left here should be escaped so that it can be shown as \n
let glyph_text = glyph_brush::OwnedText::new(&full_content)
.with_color(colors::to_slice(*highlight_color))

View file

@@ -416,7 +416,7 @@ fn preprocess_impl(
})
.map(|(_, reloc)| reloc)
.filter(|reloc| matches!(reloc.kind(), RelocationKind::Elf(6)))
- .map(|reloc| {
+ .filter_map(|reloc| {
for symbol in app_syms.iter() {
if reloc.target() == RelocationTarget::Symbol(symbol.index()) {
return Some((symbol.name().unwrap().to_string(), symbol.index().0));
@@ -424,7 +424,6 @@ fn preprocess_impl(
}
None
})
- .flatten()
.collect();
for sym in app_syms.iter() {

View file

@@ -1731,10 +1731,10 @@ fn to_precord_report<'a>(
},
PRecord::Colon(_) => {
- unreachable!("because `{ foo }` is a valid field; the colon is not required")
+ unreachable!("because `foo` is a valid field; the colon is not required")
}
PRecord::Optional(_) => {
- unreachable!("because `{ foo }` is a valid field; the question mark is not required")
+ unreachable!("because `foo` is a valid field; the question mark is not required")
}
PRecord::Pattern(pattern, pos) => to_pattern_report(alloc, lines, filename, pattern, pos),
@@ -1823,11 +1823,11 @@ fn to_precord_report<'a>(
}
PRecord::IndentColon(_) => {
- unreachable!("because `{ foo }` is a valid field; the colon is not required")
+ unreachable!("because `foo` is a valid field; the colon is not required")
}
PRecord::IndentOptional(_) => {
- unreachable!("because `{ foo }` is a valid field; the question mark is not required")
+ unreachable!("because `foo` is a valid field; the question mark is not required")
}
PRecord::Space(error, pos) => to_space_report(alloc, lines, filename, &error, pos),
@@ -2288,10 +2288,10 @@ fn to_trecord_report<'a>(
},
ETypeRecord::Colon(_) => {
- unreachable!("because `{ foo }` is a valid field; the colon is not required")
+ unreachable!("because `foo` is a valid field; the colon is not required")
}
ETypeRecord::Optional(_) => {
- unreachable!("because `{ foo }` is a valid field; the question mark is not required")
+ unreachable!("because `foo` is a valid field; the question mark is not required")
}
ETypeRecord::Type(tipe, pos) => to_type_report(alloc, lines, filename, tipe, pos),
@@ -2371,11 +2371,11 @@ fn to_trecord_report<'a>(
}
ETypeRecord::IndentColon(_) => {
- unreachable!("because `{ foo }` is a valid field; the colon is not required")
+ unreachable!("because `foo` is a valid field; the colon is not required")
}
ETypeRecord::IndentOptional(_) => {
- unreachable!("because `{ foo }` is a valid field; the question mark is not required")
+ unreachable!("because `foo` is a valid field; the question mark is not required")
}
ETypeRecord::Space(error, pos) => to_space_report(alloc, lines, filename, &error, pos),

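The reworded `unreachable!` messages drop the braces around `{ foo }`. Since the panic-family macros treat their message as a format string, literal braces read as malformed placeholders and must either be doubled or avoided; this commit avoids them. The exact warning that prompted the change isn't shown, but the escaping rule itself is easy to check:

    fn main() {
        // Doubled braces are how a format string keeps literal braces;
        // the simpler option, taken in this commit, is to reword without braces.
        assert_eq!(format!("{{ foo }}"), "{ foo }");
    }
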
View file

@@ -774,7 +774,7 @@ fn preprocess_op(
debug_assert_eq!(inputs.len(), 1);
let cont_binding = continuations_in_scope[continuation]
- .ok_or_else(|| ErrorKind::ContinuationNotInScope(*continuation))?;
+ .ok_or(ErrorKind::ContinuationNotInScope(*continuation))?;
check_type(ctx.nc, tc, cont_binding.arg_type, input_types[0])?;
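
The `ok_or_else` to `ok_or` rewrites in this file are the same `unnecessary_lazy_evaluations` cleanup in `Result` form: constructing the error variant is cheap, so there is nothing worth deferring. A sketch with a stand-in error type:

    #[derive(Debug, PartialEq)]
    enum ErrorKind {
        TupleFieldOutOfRange(u32),
    }

    fn main() {
        let missing: Option<u32> = None;
        let field_idx = 3;

        // Lint target: a closure that only wraps a cheap constructor call.
        let a = missing.ok_or_else(|| ErrorKind::TupleFieldOutOfRange(field_idx));

        // Suggested form: build the error eagerly and hand it to ok_or.
        let b = missing.ok_or(ErrorKind::TupleFieldOutOfRange(field_idx));

        assert_eq!(a, b);
    }
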
@@ -1045,7 +1045,7 @@ fn preprocess_op(
let field_types = try_get_tuple_field_types(ctx.nc, tc, tuple_type)?;
let field_type = *field_types
.get(*field_idx as usize)
- .ok_or_else(|| ErrorKind::TupleFieldOutOfRange(*field_idx))?;
+ .ok_or(ErrorKind::TupleFieldOutOfRange(*field_idx))?;
let value = graph_builder.add_op(
block,
ir::OpKind::GetTupleField {
@@ -1068,7 +1068,7 @@ fn preprocess_op(
.collect();
let this_variant_type = *tc_variant_types
.get(*variant_idx as usize)
- .ok_or_else(|| ErrorKind::UnionVariantOutOfRange(*variant_idx))?;
+ .ok_or(ErrorKind::UnionVariantOutOfRange(*variant_idx))?;
check_type(ctx.nc, tc, this_variant_type, input_types[0])?;
let union_type = tc.types.get_or_insert(TypeData::Union {
variants: tc_variant_types,
@@ -1089,7 +1089,7 @@ fn preprocess_op(
let variant_types = try_get_union_variant_types(ctx.nc, tc, input_types[0])?;
let this_variant_type = *variant_types
.get(*variant_idx as usize)
- .ok_or_else(|| ErrorKind::UnionVariantOutOfRange(*variant_idx))?;
+ .ok_or(ErrorKind::UnionVariantOutOfRange(*variant_idx))?;
let value = graph_builder.add_op(
block,
ir::OpKind::UnwrapUnion {