Auto merge of #13929 - danieleades:simplify, r=lnicola

internal: a number of code simplifications
bors 2023-01-11 09:38:34 +00:00
commit f32f64bffc
22 changed files with 161 additions and 202 deletions


@@ -80,7 +80,7 @@ impl<'a> Write for Printer<'a> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         for line in s.split_inclusive('\n') {
             if self.needs_indent {
-                match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
+                match self.buf.chars().rev().find(|ch| *ch != ' ') {
                     Some('\n') | None => {}
                     _ => self.buf.push('\n'),
                 }
@@ -113,7 +113,7 @@ impl<'a> Printer<'a> {
     }
     fn newline(&mut self) {
-        match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
+        match self.buf.chars().rev().find(|ch| *ch != ' ') {
             Some('\n') | None => {}
             _ => writeln!(self).unwrap(),
         }
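
Both hunks are the same rewrite: `iter.skip_while(p).next()` always yields the first element for which `p` is false, so it can be written as `iter.find(|x| !p(x))` with one adapter fewer. A small self-contained sketch (the helper name is made up for illustration):

    fn last_non_space(buf: &str) -> Option<char> {
        // before: buf.chars().rev().skip_while(|ch| *ch == ' ').next()
        buf.chars().rev().find(|ch| *ch != ' ')
    }

    fn main() {
        assert_eq!(last_non_space("foo   "), Some('o'));
        assert_eq!(last_non_space("   "), None);
        assert_eq!(last_non_space("a\n  "), Some('\n'));
    }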


@@ -1600,17 +1600,15 @@ impl ModCollector<'_, '_> {
         FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
         let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
-        if self.def_collector.is_proc_macro {
-            if self.module_id == def_map.root {
-                if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
-                    let crate_root = def_map.module_id(def_map.root);
-                    self.def_collector.export_proc_macro(
-                        proc_macro,
-                        ItemTreeId::new(self.tree_id, id),
-                        fn_id,
-                        crate_root,
-                    );
-                }
-            }
-        }
+        if self.def_collector.is_proc_macro && self.module_id == def_map.root {
+            if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
+                let crate_root = def_map.module_id(def_map.root);
+                self.def_collector.export_proc_macro(
+                    proc_macro,
+                    ItemTreeId::new(self.tree_id, id),
+                    fn_id,
+                    crate_root,
+                );
+            }
+        }
     }
 }
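
This is the classic collapsible-if cleanup: two nested `if`s whose inner block is the only statement can be merged with `&&`, saving a level of indentation without changing behaviour. A standalone sketch with made-up names:

    fn exported_name_len(is_proc_macro: bool, is_crate_root: bool, name: Option<&str>) -> usize {
        // before:
        // if is_proc_macro {
        //     if is_crate_root {
        //         if let Some(name) = name { return name.len(); }
        //     }
        // }
        if is_proc_macro && is_crate_root {
            if let Some(name) = name {
                return name.len();
            }
        }
        0
    }

    fn main() {
        assert_eq!(exported_name_len(true, true, Some("serde_derive")), 12);
        assert_eq!(exported_name_len(true, false, Some("serde_derive")), 0);
    }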


@@ -1136,18 +1136,16 @@ impl<'a> InferenceContext<'a> {
         if self.diverges.is_always() {
             // we don't even make an attempt at coercion
             self.table.new_maybe_never_var()
-        } else {
-            if let Some(t) = expected.only_has_type(&mut self.table) {
-                if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
-                    self.result.type_mismatches.insert(
-                        expr.into(),
-                        TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
-                    );
-                }
-                t
-            } else {
-                TyBuilder::unit()
-            }
+        } else if let Some(t) = expected.only_has_type(&mut self.table) {
+            if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+                self.result.type_mismatches.insert(
+                    expr.into(),
+                    TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+                );
+            }
+            t
+        } else {
+            TyBuilder::unit()
         }
     }
 }
@@ -1314,13 +1312,13 @@ impl<'a> InferenceContext<'a> {
             } else {
                 param_ty
             };
-            if !coercion_target.is_unknown() {
-                if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
-                    self.result.type_mismatches.insert(
-                        arg.into(),
-                        TypeMismatch { expected: coercion_target, actual: ty.clone() },
-                    );
-                }
-            }
+            if !coercion_target.is_unknown()
+                && self.coerce(Some(arg), &ty, &coercion_target).is_err()
+            {
+                self.result.type_mismatches.insert(
+                    arg.into(),
+                    TypeMismatch { expected: coercion_target, actual: ty.clone() },
+                );
+            }
         }
     }
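
The first hunk removes an `else { if let ... }` nesting by turning it into `else if let`; the second merges a nested `if` into the outer condition with `&&`. A minimal sketch of the `else if let` shape, with made-up types:

    enum Expected {
        Ty(&'static str),
        None,
    }

    fn infer(diverges: bool, expected: Expected) -> &'static str {
        if diverges {
            "never"
        } else if let Expected::Ty(t) = expected {
            // previously written as `else { if let ... { ... } else { ... } }`,
            // which only added a level of nesting
            t
        } else {
            "()"
        }
    }

    fn main() {
        assert_eq!(infer(false, Expected::Ty("i32")), "i32");
        assert_eq!(infer(false, Expected::None), "()");
        assert_eq!(infer(true, Expected::None), "never");
    }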


@@ -251,17 +251,14 @@ fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, La
 fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
     match pointee.kind(Interner) {
-        TyKind::Adt(AdtId(adt), subst) => match adt {
-            &hir_def::AdtId::StructId(i) => {
-                let data = db.struct_data(i);
-                let mut it = data.variant_data.fields().iter().rev();
-                match it.next() {
-                    Some((f, _)) => field_ty(db, i.into(), f, subst),
-                    None => pointee,
-                }
-            }
-            _ => pointee,
-        },
+        TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => {
+            let data = db.struct_data(*i);
+            let mut it = data.variant_data.fields().iter().rev();
+            match it.next() {
+                Some((f, _)) => field_ty(db, (*i).into(), f, subst),
+                None => pointee,
+            }
+        }
         _ => pointee,
     }
 }
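
Here a match on the ADT followed by an inner match on its kind is flattened by writing the nested pattern directly in the outer arm. A sketch with stand-in enums (not the real rust-analyzer types):

    enum Adt {
        Struct(usize),
        Enum(usize),
    }

    enum Kind {
        Adt(Adt),
        Other,
    }

    fn struct_field_count(kind: &Kind) -> usize {
        match kind {
            // before:
            // Kind::Adt(adt) => match adt {
            //     Adt::Struct(n) => *n,
            //     _ => 0,
            // },
            Kind::Adt(Adt::Struct(n)) => *n,
            _ => 0,
        }
    }

    fn main() {
        assert_eq!(struct_field_count(&Kind::Adt(Adt::Struct(3))), 3);
        assert_eq!(struct_field_count(&Kind::Adt(Adt::Enum(2))), 0);
        assert_eq!(struct_field_count(&Kind::Other), 0);
    }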


@@ -472,8 +472,8 @@ impl Module {
         let def_map = self.id.def_map(db.upcast());
         let children = def_map[self.id.local_id]
             .children
-            .iter()
-            .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+            .values()
+            .map(|module_id| Module { id: def_map.module_id(*module_id) })
             .collect::<Vec<_>>();
         children.into_iter()
     }
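
When only the values of a map are needed, `.values()` states that directly and avoids destructuring away the key. A sketch using a plain `HashMap` (the real code iterates a different map type):

    use std::collections::HashMap;

    fn child_ids(children: &HashMap<String, u32>) -> Vec<u32> {
        // before: children.iter().map(|(_, id)| *id).collect()
        children.values().copied().collect()
    }

    fn main() {
        let mut m = HashMap::new();
        m.insert("a".to_string(), 1);
        m.insert("b".to_string(), 2);
        let mut ids = child_ids(&m);
        ids.sort();
        assert_eq!(ids, vec![1, 2]);
    }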


@@ -161,19 +161,17 @@ fn collect_used_generics<'gp>(
                 .and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
         ),
         ast::Type::ArrayType(ar) => {
-            if let Some(expr) = ar.expr() {
-                if let ast::Expr::PathExpr(p) = expr {
-                    if let Some(path) = p.path() {
-                        if let Some(name_ref) = path.as_single_name_ref() {
-                            if let Some(param) = known_generics.iter().find(|gp| {
-                                if let ast::GenericParam::ConstParam(cp) = gp {
-                                    cp.name().map_or(false, |n| n.text() == name_ref.text())
-                                } else {
-                                    false
-                                }
-                            }) {
-                                generics.push(param);
-                            }
-                        }
-                    }
-                }
-            }
+            if let Some(ast::Expr::PathExpr(p)) = ar.expr() {
+                if let Some(path) = p.path() {
+                    if let Some(name_ref) = path.as_single_name_ref() {
+                        if let Some(param) = known_generics.iter().find(|gp| {
+                            if let ast::GenericParam::ConstParam(cp) = gp {
+                                cp.name().map_or(false, |n| n.text() == name_ref.text())
+                            } else {
+                                false
+                            }
+                        }) {
+                            generics.push(param);
+                        }
+                    }
+                }
+            }
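
Two adjacent `if let`s can be merged by nesting the inner pattern inside the outer one, which is what removes one level here. A standalone sketch (`Expr` is a made-up stand-in for an AST node):

    enum Expr {
        Path(String),
        Literal(i32),
    }

    fn single_path_name(expr: Option<Expr>) -> Option<String> {
        // before:
        // if let Some(e) = expr {
        //     if let Expr::Path(name) = e {
        //         return Some(name);
        //     }
        // }
        // None
        if let Some(Expr::Path(name)) = expr {
            Some(name)
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(single_path_name(Some(Expr::Path("N".into()))), Some("N".to_string()));
        assert_eq!(single_path_name(Some(Expr::Literal(3))), None);
        assert_eq!(single_path_name(None), None);
    }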


@@ -82,18 +82,18 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
     let generic_params = impl_.generic_param_list().map(|generic_params| {
         let lifetime_params =
             generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
-        let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+        let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
             // remove defaults since they can't be specified in impls
             match param {
                 ast::TypeOrConstParam::Type(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::TypeParam(param))
+                    ast::GenericParam::TypeParam(param)
                 }
                 ast::TypeOrConstParam::Const(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::ConstParam(param))
+                    ast::GenericParam::ConstParam(param)
                 }
             }
         });
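
A `filter_map` whose closure always returns `Some(..)` never filters anything, so plain `map` expresses the intent more honestly. A minimal sketch:

    fn absolute(values: Vec<i32>) -> Vec<i32> {
        // before: values.into_iter().filter_map(|v| Some(v.abs())).collect()
        values.into_iter().map(|v| v.abs()).collect()
    }

    fn main() {
        assert_eq!(absolute(vec![-1, 2, -3]), vec![1, 2, 3]);
    }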


@@ -92,7 +92,7 @@ trait Merge: AstNode + Clone {
     fn try_merge_from(self, items: &mut dyn Iterator<Item = Self>) -> Option<Vec<Edit>> {
         let mut edits = Vec::new();
         let mut merged = self.clone();
-        while let Some(item) = items.next() {
+        for item in items {
             merged = merged.try_merge(&item)?;
             edits.push(Edit::Remove(item.into_either()));
         }
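
`while let Some(x) = iter.next()` is a hand-rolled `for` loop; `for x in iter` does the same thing and also works for `&mut dyn Iterator`, since a mutable reference to an iterator is itself an iterator. A small sketch:

    fn sum(items: &mut dyn Iterator<Item = u32>) -> u32 {
        let mut total = 0;
        // before: while let Some(item) = items.next() { total += item; }
        for item in items {
            total += item;
        }
        total
    }

    fn main() {
        let mut it = [1u32, 2, 3].into_iter();
        assert_eq!(sum(&mut it), 6);
    }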


@@ -86,8 +86,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
             it.prev_sibling_or_token()
         })
         .map(|it| it.kind())
-        .skip_while(|it| it.is_trivia())
-        .next()
+        .find(|it| !it.is_trivia())
         == Some(T![,]);
     let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
     if !has_comma_after && !has_arms_after {


@@ -334,11 +334,9 @@ pub fn source_edit_from_references(
             }
             _ => false,
         };
-        if !has_emitted_edit {
-            if !edited_ranges.contains(&range.start()) {
-                edit.replace(range, new_name.to_string());
-                edited_ranges.push(range.start());
-            }
-        }
+        if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
+            edit.replace(range, new_name.to_string());
+            edited_ranges.push(range.start());
+        }
     }
 }
@@ -391,19 +389,17 @@ fn source_edit_from_name_ref(
                 edit.delete(TextRange::new(s, e));
                 return true;
             }
-        } else if init == name_ref {
-            if field_name.text() == new_name {
-                cov_mark::hit!(test_rename_local_put_init_shorthand);
-                // Foo { field: local } -> Foo { field }
-                //            ^^^^^^^ delete this
-                // same names, we can use a shorthand here instead.
-                // we do not want to erase attributes hence this range start
-                let s = field_name.syntax().text_range().end();
-                let e = init.syntax().text_range().end();
-                edit.delete(TextRange::new(s, e));
-                return true;
-            }
+        } else if init == name_ref && field_name.text() == new_name {
+            cov_mark::hit!(test_rename_local_put_init_shorthand);
+            // Foo { field: local } -> Foo { field }
+            //            ^^^^^^^ delete this
+            // same names, we can use a shorthand here instead.
+            // we do not want to erase attributes hence this range start
+            let s = field_name.syntax().text_range().end();
+            let e = init.syntax().text_range().end();
+            edit.delete(TextRange::new(s, e));
+            return true;
         }
     }
     // init shorthand


@@ -494,20 +494,28 @@ impl<'a> FindUsages<'a> {
         }
         // Search for `super` and `crate` resolving to our module
-        match self.def {
-            Definition::Module(module) => {
-                let scope = search_scope
-                    .intersection(&SearchScope::module_and_children(self.sema.db, module));
-                let is_crate_root =
-                    module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
-                let finder = &Finder::new("super");
-                for (text, file_id, search_range) in scope_files(sema, &scope) {
-                    let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
-                    for offset in match_indices(&text, finder, search_range) {
-                        if let Some(iter) = find_nodes("super", &tree, offset) {
-                            for name_ref in iter.filter_map(ast::NameRef::cast) {
-                                if self.found_name_ref(&name_ref, sink) {
-                                    return;
+        if let Definition::Module(module) = self.def {
+            let scope =
+                search_scope.intersection(&SearchScope::module_and_children(self.sema.db, module));
+            let is_crate_root = module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
+            let finder = &Finder::new("super");
+            for (text, file_id, search_range) in scope_files(sema, &scope) {
+                let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+                for offset in match_indices(&text, finder, search_range) {
+                    if let Some(iter) = find_nodes("super", &tree, offset) {
+                        for name_ref in iter.filter_map(ast::NameRef::cast) {
+                            if self.found_name_ref(&name_ref, sink) {
+                                return;
+                            }
+                        }
+                    }
+                }
+                if let Some(finder) = &is_crate_root {
+                    for offset in match_indices(&text, finder, search_range) {
+                        if let Some(iter) = find_nodes("crate", &tree, offset) {
+                            for name_ref in iter.filter_map(ast::NameRef::cast) {
+                                if self.found_name_ref(&name_ref, sink) {
+                                    return;
@@ -515,20 +523,8 @@ impl<'a> FindUsages<'a> {
                                 }
                             }
                         }
-                        if let Some(finder) = &is_crate_root {
-                            for offset in match_indices(&text, finder, search_range) {
-                                if let Some(iter) = find_nodes("crate", &tree, offset) {
-                                    for name_ref in iter.filter_map(ast::NameRef::cast) {
-                                        if self.found_name_ref(&name_ref, sink) {
-                                            return;
-                                        }
-                                    }
-                                }
-                            }
-                        }
                     }
                 }
             }
-            _ => (),
         }

         // search for module `self` references in our module's definition source
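
A `match` with one arm of interest and `_ => ()` for everything else reads more directly as `if let`, which is the core of this change (the body is also dedented one level). A sketch with a made-up `Definition` enum:

    enum Definition {
        Module(String),
        Function(String),
    }

    fn module_name(def: &Definition) -> Option<&str> {
        // before:
        // match def {
        //     Definition::Module(name) => return Some(name.as_str()),
        //     _ => (),
        // }
        // None
        if let Definition::Module(name) = def {
            Some(name.as_str())
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(module_name(&Definition::Module("core".into())), Some("core"));
        assert_eq!(module_name(&Definition::Function("main".into())), None);
    }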


@@ -323,10 +323,10 @@ impl Query {
                 if symbol.name != self.query {
                     continue;
                 }
-            } else if self.case_sensitive {
-                if self.query.chars().any(|c| !symbol.name.contains(c)) {
-                    continue;
-                }
+            } else if self.case_sensitive
+                && self.query.chars().any(|c| !symbol.name.contains(c))
+            {
+                continue;
             }
             res.push(symbol.clone());


@@ -99,76 +99,66 @@ pub(crate) fn json_in_items(
         && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY)
     {
         let node_string = node.to_string();
-        if let Ok(it) = serde_json::from_str(&node_string) {
-            if let serde_json::Value::Object(it) = it {
+        if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) {
             let import_scope = ImportScope::find_insert_use_container(node, sema)?;
             let range = node.text_range();
             let mut edit = TextEdit::builder();
             edit.delete(range);
             let mut state = State::default();
             let semantics_scope = sema.scope(node)?;
             let scope_resolve =
                 |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
             let scope_has = |it| scope_resolve(it).is_some();
             let deserialize_resolved = scope_resolve("::serde::Deserialize");
             let serialize_resolved = scope_resolve("::serde::Serialize");
             state.has_deserialize = deserialize_resolved.is_some();
             state.has_serialize = serialize_resolved.is_some();
             state.build_struct(&it);
             edit.insert(range.start(), state.result);
             acc.push(
                 Diagnostic::new(
                     "json-is-not-rust",
                     "JSON syntax is not valid as a Rust item",
                     range,
                 )
                 .severity(Severity::WeakWarning)
                 .with_fixes(Some(vec![{
                     let mut scb = SourceChangeBuilder::new(file_id);
                     let scope = match import_scope {
                         ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
                         ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
                         ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
                     };
                     let current_module = semantics_scope.module();
                     if !scope_has("Serialize") {
                         if let Some(PathResolution::Def(it)) = serialize_resolved {
                             if let Some(it) = current_module.find_use_path_prefixed(
                                 sema.db,
                                 it,
                                 config.insert_use.prefix_kind,
                                 config.prefer_no_std,
                             ) {
-                                insert_use(
-                                    &scope,
-                                    mod_path_to_ast(&it),
-                                    &config.insert_use,
-                                );
+                                insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                             }
                         }
                     }
                     if !scope_has("Deserialize") {
                         if let Some(PathResolution::Def(it)) = deserialize_resolved {
                             if let Some(it) = current_module.find_use_path_prefixed(
                                 sema.db,
                                 it,
                                 config.insert_use.prefix_kind,
                                 config.prefer_no_std,
                             ) {
-                                insert_use(
-                                    &scope,
-                                    mod_path_to_ast(&it),
-                                    &config.insert_use,
-                                );
+                                insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                             }
                         }
                     }
                     let mut sc = scb.finish();
                     sc.insert_source_edit(file_id, edit.finish());
                     fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
                 }])),
             );
-            }
         }
     }
     Some(())
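
The same nested-`if let` collapse as earlier, here reaching through both the `Result` and the enum variant in a single pattern. A sketch assuming serde_json as a dependency:

    fn object_key_count(text: &str) -> usize {
        // before:
        // if let Ok(value) = serde_json::from_str::<serde_json::Value>(text) {
        //     if let serde_json::Value::Object(map) = value {
        //         return map.len();
        //     }
        // }
        // 0
        if let Ok(serde_json::Value::Object(map)) = serde_json::from_str::<serde_json::Value>(text) {
            map.len()
        } else {
            0
        }
    }

    fn main() {
        assert_eq!(object_key_count(r#"{ "a": 1, "b": 2 }"#), 2);
        assert_eq!(object_key_count("[1, 2, 3]"), 0);
        assert_eq!(object_key_count("not json"), 0);
    }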


@@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item(
     d: &hir::PrivateAssocItem,
 ) -> Diagnostic {
     // FIXME: add quickfix
-    let name = match d.item.name(ctx.sema.db) {
-        Some(name) => format!("`{}` ", name),
-        None => String::new(),
-    };
+    let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default();
     Diagnostic::new(
         "private-assoc-item",
         format!(
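
A `match` over an `Option` that formats the `Some` value and falls back to an empty string is exactly `map(..).unwrap_or_default()`; the next hunk does the same with a plain fallback via `unwrap_or(..)`. A small sketch:

    fn label(name: Option<&str>) -> String {
        // before:
        // match name {
        //     Some(name) => format!("`{name}` "),
        //     None => String::new(),
        // }
        name.map(|name| format!("`{name}` ")).unwrap_or_default()
    }

    fn error_text(err: Option<&'static str>) -> &'static str {
        // before: match err { Some(e) => e, None => "proc macro not found" }
        err.unwrap_or("proc macro not found")
    }

    fn main() {
        assert_eq!(label(Some("len")), "`len` ");
        assert_eq!(label(None), "");
        assert_eq!(error_text(None), "proc macro not found");
        assert_eq!(error_text(Some("dylib load failed")), "dylib load failed");
    }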


@@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro(
     let message = format!(
         "{message}: {}",
         if config_enabled {
-            match def_map.proc_macro_loading_error() {
-                Some(e) => e,
-                None => "proc macro not found in the built dylib",
-            }
+            def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib")
         } else {
             match d.kind {
                 hir::MacroKind::Attr if proc_macros_enabled => {


@@ -64,12 +64,10 @@ pub(super) fn type_info(
             bt_end = if config.markdown() { "```\n" } else { "" }
         )
         .into()
-    } else {
-        if config.markdown() {
-            Markup::fenced_block(&original.display(sema.db))
-        } else {
-            original.display(sema.db).to_string().into()
-        }
+    } else if config.markdown() {
+        Markup::fenced_block(&original.display(sema.db))
+    } else {
+        original.display(sema.db).to_string().into()
     };
     res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
     Some(res)


@@ -161,10 +161,8 @@ fn remove_newline(
         }
     }

-    if config.join_assignments {
-        if join_assignments(edit, &prev, &next).is_some() {
-            return;
-        }
+    if config.join_assignments && join_assignments(edit, &prev, &next).is_some() {
+        return;
     }

     if config.unwrap_trivial_blocks {


@@ -413,11 +413,10 @@ fn traverse(
                 let string = ast::String::cast(token);
                 let string_to_highlight = ast::String::cast(descended_token.clone());
                 if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
-                    if string.is_raw() {
-                        if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
-                        {
-                            continue;
-                        }
+                    if string.is_raw()
+                        && inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
+                    {
+                        continue;
                     }
                     highlight_format_string(hl, &string, &expanded_string, range);
                     highlight_escape_string(hl, &string, range.start());


@@ -205,10 +205,8 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
         if expr_stmt.semicolon_token().is_some() {
             return None;
         }
-    } else {
-        if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
-            return None;
-        }
+    } else if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
+        return None;
     }

     let expr = binop.rhs()?;


@@ -212,7 +212,7 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe
     } else {
         ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
             |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
-            |it| ExpandResult::ok(it),
+            ExpandResult::ok,
         )
     }
 }
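
A closure that only forwards its argument to another function can be replaced by the function path itself. A minimal sketch:

    fn parse(s: &str) -> Option<i32> {
        s.parse().ok()
    }

    fn parse_all(inputs: &[&str]) -> Vec<Option<i32>> {
        // before: inputs.iter().copied().map(|s| parse(s)).collect()
        inputs.iter().copied().map(parse).collect()
    }

    fn main() {
        assert_eq!(parse_all(&["1", "x", "3"]), vec![Some(1), None, Some(3)]);
    }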


@@ -366,7 +366,7 @@ impl ProjectWorkspace {
                 _ => None,
             })
             .collect();
-        let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
+        let outputs = &mut match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
             Ok(it) => Ok(it.into_iter()),
             // io::Error is not Clone?
             Err(e) => Err(Arc::new(e)),
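
`let ref mut x = expr;` binds a mutable reference through the pattern; writing `let x = &mut expr;` says the same thing on the value side and is the form lints usually suggest. A tiny sketch:

    fn main() {
        // before: let ref mut outputs = vec![1, 2, 3].into_iter();
        let outputs = &mut vec![1, 2, 3].into_iter();
        assert_eq!(outputs.next(), Some(1));
        assert_eq!(outputs.count(), 2);
    }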


@@ -307,10 +307,10 @@ impl GlobalState {
             }
         }

-        if !was_quiescent || state_changed || memdocs_added_or_removed {
-            if self.config.publish_diagnostics() {
-                self.update_diagnostics()
-            }
-        }
+        if (!was_quiescent || state_changed || memdocs_added_or_removed)
+            && self.config.publish_diagnostics()
+        {
+            self.update_diagnostics()
+        }
     }