cargo clippy --fix

Lukas Wirth 2025-07-31 10:30:22 +02:00
parent 423c7dd23a
commit 8ce30264c8
186 changed files with 3056 additions and 3314 deletions
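
Almost every hunk in this commit is the same mechanical rewrite: with let-chains stable on the 2024 edition, clippy's collapsible_if lint (the lint that normally drives this rewrite, applied here via `cargo clippy --fix`, typically with `--allow-dirty`/`--allow-staged` on a checked-out tree) merges a nested `if` / `if let` into a single `if` whose conditions are joined by `&&`. A minimal sketch of the pattern on a hypothetical function, not code from this commit:

    // Before: nested conditionals that clippy flags as collapsible.
    fn first_even(xs: &[i32]) -> Option<i32> {
        if let Some(&x) = xs.first() {
            if x % 2 == 0 {
                return Some(x);
            }
        }
        None
    }

    // After `cargo clippy --fix`: one let-chain, one level of nesting less.
    fn first_even_collapsed(xs: &[i32]) -> Option<i32> {
        if let Some(&x) = xs.first()
            && x % 2 == 0
        {
            return Some(x);
        }
        None
    }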


@@ -134,10 +134,10 @@ fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
     };
     // Eat comma separator
-    if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = it.peek() {
-        if punct.char == ',' {
-            it.next();
-        }
-    }
+    if let Some(TtElement::Leaf(tt::Leaf::Punct(punct))) = it.peek()
+        && punct.char == ','
+    {
+        it.next();
+    }
     Some(ret)
 }


@@ -377,10 +377,10 @@ fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
     let mut align = None;
     if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
         tts.next();
-        if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
-            if let Ok(a) = lit.symbol.as_str().parse() {
-                align = Align::from_bytes(a).ok();
-            }
-        }
+        if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
+            && let Ok(a) = lit.symbol.as_str().parse()
+        {
+            align = Align::from_bytes(a).ok();
+        }
     }
     ReprOptions { align, ..Default::default() }


@@ -1487,13 +1487,13 @@ impl ExprCollector<'_> {
             ast::Expr::UnderscoreExpr(_) => self.alloc_pat_from_expr(Pat::Wild, syntax_ptr),
             ast::Expr::ParenExpr(e) => {
                 // We special-case `(..)` for consistency with patterns.
-                if let Some(ast::Expr::RangeExpr(range)) = e.expr() {
-                    if range.is_range_full() {
-                        return Some(self.alloc_pat_from_expr(
-                            Pat::Tuple { args: Box::default(), ellipsis: Some(0) },
-                            syntax_ptr,
-                        ));
-                    }
+                if let Some(ast::Expr::RangeExpr(range)) = e.expr()
+                    && range.is_range_full()
+                {
+                    return Some(self.alloc_pat_from_expr(
+                        Pat::Tuple { args: Box::default(), ellipsis: Some(0) },
+                        syntax_ptr,
+                    ));
                 }
                 return e.expr().and_then(|expr| self.maybe_collect_expr_as_pat(&expr));
             }
@@ -2569,19 +2569,18 @@ impl ExprCollector<'_> {
                 }
             }
             RibKind::MacroDef(macro_id) => {
-                if let Some((parent_ctx, label_macro_id)) = hygiene_info {
-                    if label_macro_id == **macro_id {
-                        // A macro is allowed to refer to labels from before its declaration.
-                        // Therefore, if we got to the rib of its declaration, give up its hygiene
-                        // and use its parent expansion.
-                        hygiene_id =
-                            HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
-                        hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
-                            let expansion = self.db.lookup_intern_macro_call(expansion.into());
-                            (parent_ctx.parent(self.db), expansion.def)
-                        });
-                    }
+                if let Some((parent_ctx, label_macro_id)) = hygiene_info
+                    && label_macro_id == **macro_id
+                {
+                    // A macro is allowed to refer to labels from before its declaration.
+                    // Therefore, if we got to the rib of its declaration, give up its hygiene
+                    // and use its parent expansion.
+                    hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
+                    hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
+                        let expansion = self.db.lookup_intern_macro_call(expansion.into());
+                        (parent_ctx.parent(self.db), expansion.def)
+                    });
                 }
             }
             _ => {}


@@ -259,10 +259,10 @@ impl ExprCollector<'_> {
                 }
             };
-            if let Some(operand_idx) = operand_idx {
-                if let Some(position_span) = to_span(arg.position_span) {
-                    mappings.push((position_span, operand_idx));
-                }
-            }
+            if let Some(operand_idx) = operand_idx
+                && let Some(position_span) = to_span(arg.position_span)
+            {
+                mappings.push((position_span, operand_idx));
+            }
         }
     }


@@ -211,16 +211,17 @@ pub(super) fn lower_path(
     // Basically, even in rustc it is quite hacky:
     // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
     // We follow what it did anyway :)
-    if segments.len() == 1 && kind == PathKind::Plain {
-        if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
-            if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
-                if collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
-                    kind = match resolve_crate_root(collector.db, syn_ctxt) {
-                        Some(crate_root) => PathKind::DollarCrate(crate_root),
-                        None => PathKind::Crate,
-                    }
-                }
+    if segments.len() == 1
+        && kind == PathKind::Plain
+        && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast)
+    {
+        let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
+        if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db)
+            && collector.db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner
+        {
+            kind = match resolve_crate_root(collector.db, syn_ctxt) {
+                Some(crate_root) => PathKind::DollarCrate(crate_root),
+                None => PathKind::Crate,
+            }
         }
     }


@@ -900,14 +900,12 @@ impl Printer<'_> {
         let field_name = arg.name.display(self.db, edition).to_string();
         let mut same_name = false;
-        if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
-            if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
-                &self.store.assert_expr_only().bindings[*id]
-            {
-                if name.as_str() == field_name {
-                    same_name = true;
-                }
-            }
+        if let Pat::Bind { id, subpat: None } = &self.store[arg.pat]
+            && let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
+                &self.store.assert_expr_only().bindings[*id]
+            && name.as_str() == field_name
+        {
+            same_name = true;
         }
         w!(p, "{}", field_name);


@ -107,11 +107,11 @@ struct FindPathCtx<'db> {
/// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> { fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> {
// - if the item is a module, jump straight to module search // - if the item is a module, jump straight to module search
if !ctx.is_std_item { if !ctx.is_std_item
if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { && let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item
return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) {
.map(|choice| choice.path); return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len)
} .map(|choice| choice.path);
} }
let may_be_in_scope = match ctx.prefix { let may_be_in_scope = match ctx.prefix {
@ -226,15 +226,15 @@ fn find_path_for_module(
} }
// - if the module can be referenced as self, super or crate, do that // - if the module can be referenced as self, super or crate, do that
if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from) { if let Some(kind) = is_kw_kind_relative_to_from(ctx.from_def_map, module_id, ctx.from)
if ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate { && (ctx.prefix != PrefixKind::ByCrate || kind == PathKind::Crate)
return Some(Choice { {
path: ModPath::from_segments(kind, None), return Some(Choice {
path_text_len: path_kind_len(kind), path: ModPath::from_segments(kind, None),
stability: Stable, path_text_len: path_kind_len(kind),
prefer_due_to_prelude: false, stability: Stable,
}); prefer_due_to_prelude: false,
} });
} }
// - if the module is in the prelude, return it by that path // - if the module is in the prelude, return it by that path
@ -604,29 +604,29 @@ fn find_local_import_locations(
&def_map[module.local_id] &def_map[module.local_id]
}; };
if let Some((name, vis, declared)) = data.scope.name_of(item) { if let Some((name, vis, declared)) = data.scope.name_of(item)
if vis.is_visible_from(db, from) { && vis.is_visible_from(db, from)
let is_pub_or_explicit = match vis { {
Visibility::Module(_, VisibilityExplicitness::Explicit) => { let is_pub_or_explicit = match vis {
cov_mark::hit!(explicit_private_imports); Visibility::Module(_, VisibilityExplicitness::Explicit) => {
true cov_mark::hit!(explicit_private_imports);
} true
Visibility::Module(_, VisibilityExplicitness::Implicit) => {
cov_mark::hit!(discount_private_imports);
false
}
Visibility::PubCrate(_) => true,
Visibility::Public => true,
};
// Ignore private imports unless they are explicit. these could be used if we are
// in a submodule of this module, but that's usually not
// what the user wants; and if this module can import
// the item and we're a submodule of it, so can we.
// Also this keeps the cached data smaller.
if declared || is_pub_or_explicit {
cb(visited_modules, name, module);
} }
Visibility::Module(_, VisibilityExplicitness::Implicit) => {
cov_mark::hit!(discount_private_imports);
false
}
Visibility::PubCrate(_) => true,
Visibility::Public => true,
};
// Ignore private imports unless they are explicit. these could be used if we are
// in a submodule of this module, but that's usually not
// what the user wants; and if this module can import
// the item and we're a submodule of it, so can we.
// Also this keeps the cached data smaller.
if declared || is_pub_or_explicit {
cb(visited_modules, name, module);
} }
} }


@@ -510,12 +510,11 @@ impl ItemScope {
         id: AttrId,
         idx: usize,
     ) {
-        if let Some(derives) = self.derive_macros.get_mut(&adt) {
-            if let Some(DeriveMacroInvocation { derive_call_ids, .. }) =
-                derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id)
-            {
-                derive_call_ids[idx] = Some(call);
-            }
+        if let Some(derives) = self.derive_macros.get_mut(&adt)
+            && let Some(DeriveMacroInvocation { derive_call_ids, .. }) =
+                derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id)
+        {
+            derive_call_ids[idx] = Some(call);
         }
     }


@@ -83,12 +83,12 @@ impl<'a> Ctx<'a> {
             .flat_map(|item| self.lower_mod_item(&item))
             .collect();
-        if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() {
-            if let Some(call) = tail_macro.macro_call() {
-                cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
-                if let Some(mod_item) = self.lower_mod_item(&call.into()) {
-                    self.top_level.push(mod_item);
-                }
-            }
+        if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr()
+            && let Some(call) = tail_macro.macro_call()
+        {
+            cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
+            if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+                self.top_level.push(mod_item);
+            }
         }
@@ -112,12 +112,11 @@ impl<'a> Ctx<'a> {
                 _ => None,
             })
             .collect();
-        if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr() {
-            if let Some(call) = expr.macro_call() {
-                if let Some(mod_item) = self.lower_mod_item(&call.into()) {
-                    self.top_level.push(mod_item);
-                }
-            }
+        if let Some(ast::Expr::MacroExpr(expr)) = block.tail_expr()
+            && let Some(call) = expr.macro_call()
+            && let Some(mod_item) = self.lower_mod_item(&call.into())
+        {
+            self.top_level.push(mod_item);
         }
         self.tree.vis.arena = self.visibilities.into_iter().collect();
         self.tree.top_level = self.top_level.into_boxed_slice();


@@ -218,10 +218,10 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
     for (_, module_data) in crate_def_map.modules() {
         for def in module_data.scope.declarations() {
-            if let ModuleDefId::TraitId(trait_) = def {
-                if db.attrs(trait_.into()).has_doc_notable_trait() {
-                    traits.push(trait_);
-                }
-            }
+            if let ModuleDefId::TraitId(trait_) = def
+                && db.attrs(trait_.into()).has_doc_notable_trait()
+            {
+                traits.push(trait_);
+            }
         }
     }


@ -221,46 +221,42 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
_ => None, _ => None,
}; };
if let Some(src) = src { if let Some(src) = src
if let Some(file_id) = src.file_id.macro_file() { && let Some(file_id) = src.file_id.macro_file()
if let MacroKind::Derive && let MacroKind::Derive
| MacroKind::DeriveBuiltIn | MacroKind::DeriveBuiltIn
| MacroKind::Attr | MacroKind::Attr
| MacroKind::AttrBuiltIn = file_id.kind(&db) | MacroKind::AttrBuiltIn = file_id.kind(&db)
{ {
let call = file_id.call_node(&db); let call = file_id.call_node(&db);
let mut show_spans = false; let mut show_spans = false;
let mut show_ctxt = false; let mut show_ctxt = false;
for comment in for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) show_spans |= comment.to_string().contains("+spans");
{ show_ctxt |= comment.to_string().contains("+syntaxctxt");
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
} }
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
} }
} }
for impl_id in def_map[local_id].scope.impls() { for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db); let src = impl_id.lookup(&db).source(&db);
if let Some(macro_file) = src.file_id.macro_file() { if let Some(macro_file) = src.file_id.macro_file()
if let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) { && let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db)
let pp = pretty_print_macro_expansion( {
src.value.syntax().clone(), let pp = pretty_print_macro_expansion(
db.span_map(macro_file.into()).as_ref(), src.value.syntax().clone(),
false, db.span_map(macro_file.into()).as_ref(),
false, false,
); false,
format_to!(expanded_text, "\n{}", pp) );
} format_to!(expanded_text, "\n{}", pp)
} }
} }


@ -261,20 +261,20 @@ impl<'db> DefCollector<'db> {
// Process other crate-level attributes. // Process other crate-level attributes.
for attr in &*attrs { for attr in &*attrs {
if let Some(cfg) = attr.cfg() { if let Some(cfg) = attr.cfg()
if self.cfg_options.check(&cfg) == Some(false) { && self.cfg_options.check(&cfg) == Some(false)
process = false; {
break; process = false;
} break;
} }
let Some(attr_name) = attr.path.as_ident() else { continue }; let Some(attr_name) = attr.path.as_ident() else { continue };
match () { match () {
() if *attr_name == sym::recursion_limit => { () if *attr_name == sym::recursion_limit => {
if let Some(limit) = attr.string_value() { if let Some(limit) = attr.string_value()
if let Ok(limit) = limit.as_str().parse() { && let Ok(limit) = limit.as_str().parse()
crate_data.recursion_limit = Some(limit); {
} crate_data.recursion_limit = Some(limit);
} }
} }
() if *attr_name == sym::crate_type => { () if *attr_name == sym::crate_type => {
@ -1188,56 +1188,44 @@ impl<'db> DefCollector<'db> {
// Multiple globs may import the same item and they may override visibility from // Multiple globs may import the same item and they may override visibility from
// previously resolved globs. Handle overrides here and leave the rest to // previously resolved globs. Handle overrides here and leave the rest to
// `ItemScope::push_res_with_import()`. // `ItemScope::push_res_with_import()`.
if let Some(def) = defs.types { if let Some(def) = defs.types
if let Some(prev_def) = prev_defs.types { && let Some(prev_def) = prev_defs.types
if def.def == prev_def.def && def.def == prev_def.def
&& self.from_glob_import.contains_type(module_id, name.clone()) && self.from_glob_import.contains_type(module_id, name.clone())
&& def.vis != prev_def.vis && def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{ {
changed = true; changed = true;
// This import is being handled here, don't pass it down to // This import is being handled here, don't pass it down to
// `ItemScope::push_res_with_import()`. // `ItemScope::push_res_with_import()`.
defs.types = None; defs.types = None;
self.def_map.modules[module_id] self.def_map.modules[module_id].scope.update_visibility_types(name, def.vis);
.scope
.update_visibility_types(name, def.vis);
}
}
} }
if let Some(def) = defs.values { if let Some(def) = defs.values
if let Some(prev_def) = prev_defs.values { && let Some(prev_def) = prev_defs.values
if def.def == prev_def.def && def.def == prev_def.def
&& self.from_glob_import.contains_value(module_id, name.clone()) && self.from_glob_import.contains_value(module_id, name.clone())
&& def.vis != prev_def.vis && def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{ {
changed = true; changed = true;
// See comment above. // See comment above.
defs.values = None; defs.values = None;
self.def_map.modules[module_id] self.def_map.modules[module_id].scope.update_visibility_values(name, def.vis);
.scope
.update_visibility_values(name, def.vis);
}
}
} }
if let Some(def) = defs.macros { if let Some(def) = defs.macros
if let Some(prev_def) = prev_defs.macros { && let Some(prev_def) = prev_defs.macros
if def.def == prev_def.def && def.def == prev_def.def
&& self.from_glob_import.contains_macro(module_id, name.clone()) && self.from_glob_import.contains_macro(module_id, name.clone())
&& def.vis != prev_def.vis && def.vis != prev_def.vis
&& def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis) && def.vis.max(prev_def.vis, &self.def_map) == Some(def.vis)
{ {
changed = true; changed = true;
// See comment above. // See comment above.
defs.macros = None; defs.macros = None;
self.def_map.modules[module_id] self.def_map.modules[module_id].scope.update_visibility_macros(name, def.vis);
.scope
.update_visibility_macros(name, def.vis);
}
}
} }
} }
@ -1392,15 +1380,14 @@ impl<'db> DefCollector<'db> {
Resolved::Yes Resolved::Yes
}; };
if let Some(ident) = path.as_ident() { if let Some(ident) = path.as_ident()
if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) { && let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id)
if helpers.iter().any(|(it, ..)| it == ident) { && helpers.iter().any(|(it, ..)| it == ident)
cov_mark::hit!(resolved_derive_helper); {
// Resolved to derive helper. Collect the item's attributes again, cov_mark::hit!(resolved_derive_helper);
// starting after the derive helper. // Resolved to derive helper. Collect the item's attributes again,
return recollect_without(self); // starting after the derive helper.
} return recollect_without(self);
}
} }
let def = match resolver_def_id(path) { let def = match resolver_def_id(path) {
@ -1729,12 +1716,12 @@ impl ModCollector<'_, '_> {
let mut process_mod_item = |item: ModItemId| { let mut process_mod_item = |item: ModItemId| {
let attrs = self.item_tree.attrs(db, krate, item.ast_id()); let attrs = self.item_tree.attrs(db, krate, item.ast_id());
if let Some(cfg) = attrs.cfg() { if let Some(cfg) = attrs.cfg()
if !self.is_cfg_enabled(&cfg) { && !self.is_cfg_enabled(&cfg)
let ast_id = item.ast_id().erase(); {
self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); let ast_id = item.ast_id().erase();
return; self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
} return;
} }
if let Err(()) = self.resolve_attributes(&attrs, item, container) { if let Err(()) = self.resolve_attributes(&attrs, item, container) {
@ -1871,14 +1858,13 @@ impl ModCollector<'_, '_> {
if self.def_collector.def_map.block.is_none() if self.def_collector.def_map.block.is_none()
&& self.def_collector.is_proc_macro && self.def_collector.is_proc_macro
&& self.module_id == DefMap::ROOT && self.module_id == DefMap::ROOT
&& let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name)
{ {
if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { self.def_collector.export_proc_macro(
self.def_collector.export_proc_macro( proc_macro,
proc_macro, InFile::new(self.file_id(), id),
InFile::new(self.file_id(), id), fn_id,
fn_id, );
);
}
} }
update_def(self.def_collector, fn_id.into(), &it.name, vis, false); update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
@ -2419,13 +2405,13 @@ impl ModCollector<'_, '_> {
macro_id, macro_id,
&self.item_tree[mac.visibility], &self.item_tree[mac.visibility],
); );
if let Some(helpers) = helpers_opt { if let Some(helpers) = helpers_opt
if self.def_collector.def_map.block.is_none() { && self.def_collector.def_map.block.is_none()
Arc::get_mut(&mut self.def_collector.def_map.data) {
.unwrap() Arc::get_mut(&mut self.def_collector.def_map.data)
.exported_derives .unwrap()
.insert(macro_id.into(), helpers); .exported_derives
} .insert(macro_id.into(), helpers);
} }
} }


@ -228,15 +228,15 @@ impl<'db> Resolver<'db> {
ResolvePathResultPrefixInfo::default(), ResolvePathResultPrefixInfo::default(),
)); ));
} }
} else if let &GenericDefId::AdtId(adt) = def { } else if let &GenericDefId::AdtId(adt) = def
if *first_name == sym::Self_ { && *first_name == sym::Self_
return Some(( {
TypeNs::AdtSelfType(adt), return Some((
remaining_idx(), TypeNs::AdtSelfType(adt),
None, remaining_idx(),
ResolvePathResultPrefixInfo::default(), None,
)); ResolvePathResultPrefixInfo::default(),
} ));
} }
if let Some(id) = params.find_type_by_name(first_name, *def) { if let Some(id) = params.find_type_by_name(first_name, *def) {
return Some(( return Some((
@ -401,13 +401,13 @@ impl<'db> Resolver<'db> {
handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id) handle_macro_def_scope(db, &mut hygiene_id, &mut hygiene_info, macro_id)
} }
Scope::GenericParams { params, def } => { Scope::GenericParams { params, def } => {
if let &GenericDefId::ImplId(impl_) = def { if let &GenericDefId::ImplId(impl_) = def
if *first_name == sym::Self_ { && *first_name == sym::Self_
return Some(( {
ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), return Some((
ResolvePathResultPrefixInfo::default(), ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None),
)); ResolvePathResultPrefixInfo::default(),
} ));
} }
if let Some(id) = params.find_const_by_name(first_name, *def) { if let Some(id) = params.find_const_by_name(first_name, *def) {
let val = ValueNs::GenericParam(id); let val = ValueNs::GenericParam(id);
@ -436,14 +436,14 @@ impl<'db> Resolver<'db> {
ResolvePathResultPrefixInfo::default(), ResolvePathResultPrefixInfo::default(),
)); ));
} }
} else if let &GenericDefId::AdtId(adt) = def { } else if let &GenericDefId::AdtId(adt) = def
if *first_name == sym::Self_ { && *first_name == sym::Self_
let ty = TypeNs::AdtSelfType(adt); {
return Some(( let ty = TypeNs::AdtSelfType(adt);
ResolveValueResult::Partial(ty, 1, None), return Some((
ResolvePathResultPrefixInfo::default(), ResolveValueResult::Partial(ty, 1, None),
)); ResolvePathResultPrefixInfo::default(),
} ));
} }
if let Some(id) = params.find_type_by_name(first_name, *def) { if let Some(id) = params.find_type_by_name(first_name, *def) {
let ty = TypeNs::GenericParam(id); let ty = TypeNs::GenericParam(id);
@ -469,13 +469,14 @@ impl<'db> Resolver<'db> {
// If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back
// to resolving to the primitive type, to allow this to still work in the presence of // to resolving to the primitive type, to allow this to still work in the presence of
// `use core::u16;`. // `use core::u16;`.
if path.kind == PathKind::Plain && n_segments > 1 { if path.kind == PathKind::Plain
if let Some(builtin) = BuiltinType::by_name(first_name) { && n_segments > 1
return Some(( && let Some(builtin) = BuiltinType::by_name(first_name)
ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), {
ResolvePathResultPrefixInfo::default(), return Some((
)); ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None),
} ResolvePathResultPrefixInfo::default(),
));
} }
None None
@ -660,12 +661,11 @@ impl<'db> Resolver<'db> {
Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()), Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()),
&Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => { &Scope::GenericParams { def: GenericDefId::ImplId(impl_), .. } => {
let impl_data = db.impl_signature(impl_); let impl_data = db.impl_signature(impl_);
if let Some(target_trait) = impl_data.target_trait { if let Some(target_trait) = impl_data.target_trait
if let Some(TypeNs::TraitId(trait_)) = self && let Some(TypeNs::TraitId(trait_)) = self
.resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path]) .resolve_path_in_type_ns_fully(db, &impl_data.store[target_trait.path])
{ {
traits.insert(trait_); traits.insert(trait_);
}
} }
} }
_ => (), _ => (),
@ -918,17 +918,17 @@ fn handle_macro_def_scope(
hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>, hygiene_info: &mut Option<(SyntaxContext, MacroDefId)>,
macro_id: &MacroDefId, macro_id: &MacroDefId,
) { ) {
if let Some((parent_ctx, label_macro_id)) = hygiene_info { if let Some((parent_ctx, label_macro_id)) = hygiene_info
if label_macro_id == macro_id { && label_macro_id == macro_id
// A macro is allowed to refer to variables from before its declaration. {
// Therefore, if we got to the rib of its declaration, give up its hygiene // A macro is allowed to refer to variables from before its declaration.
// and use its parent expansion. // Therefore, if we got to the rib of its declaration, give up its hygiene
*hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db)); // and use its parent expansion.
*hygiene_info = parent_ctx.outer_expn(db).map(|expansion| { *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
let expansion = db.lookup_intern_macro_call(expansion.into()); *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
(parent_ctx.parent(db), expansion.def) let expansion = db.lookup_intern_macro_call(expansion.into());
}); (parent_ctx.parent(db), expansion.def)
} });
} }
} }


@@ -555,12 +555,11 @@ fn concat_expand(
         // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
         // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
         // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
-        if let TtElement::Subtree(subtree, subtree_iter) = &t {
-            if let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens() {
-                if subtree.delimiter.kind == tt::DelimiterKind::Parenthesis {
-                    t = TtElement::Leaf(tt);
-                }
-            }
+        if let TtElement::Subtree(subtree, subtree_iter) = &t
+            && let [tt::TokenTree::Leaf(tt)] = subtree_iter.remaining().flat_tokens()
+            && subtree.delimiter.kind == tt::DelimiterKind::Parenthesis
+        {
+            t = TtElement::Leaf(tt);
         }
         match t {
             TtElement::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {


@@ -334,10 +334,10 @@ where
             _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
         },
     };
-    if let Some(NodeOrToken::Token(element)) = iter.peek() {
-        if element.kind() == syntax::T![,] {
-            iter.next();
-        }
-    }
+    if let Some(NodeOrToken::Token(element)) = iter.peek()
+        && element.kind() == syntax::T![,]
+    {
+        iter.next();
+    }
     result
 }


@@ -280,8 +280,8 @@ pub(crate) fn fixup_syntax(
                 }
             },
             ast::RecordExprField(it) => {
-                if let Some(colon) = it.colon_token() {
-                    if it.name_ref().is_some() && it.expr().is_none() {
+                if let Some(colon) = it.colon_token()
+                    && it.name_ref().is_some() && it.expr().is_none() {
                     append.insert(colon.into(), vec![
                         Leaf::Ident(Ident {
                             sym: sym::__ra_fixup,
@@ -290,11 +290,10 @@ pub(crate) fn fixup_syntax(
                         })
                     ]);
                 }
-                }
             },
             ast::Path(it) => {
-                if let Some(colon) = it.coloncolon_token() {
-                    if it.segment().is_none() {
+                if let Some(colon) = it.coloncolon_token()
+                    && it.segment().is_none() {
                     append.insert(colon.into(), vec![
                         Leaf::Ident(Ident {
                             sym: sym::__ra_fixup,
@@ -303,7 +302,6 @@ pub(crate) fn fixup_syntax(
                         })
                     ]);
                 }
-                }
             },
             ast::ClosureExpr(it) => {
                 if it.body().is_none() {


@@ -365,12 +365,11 @@ impl HirFileId {
                 HirFileId::FileId(id) => break id,
                 HirFileId::MacroFile(file) => {
                     let loc = db.lookup_intern_macro_call(file);
-                    if loc.def.is_include() {
-                        if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
-                            if let Ok(it) = include_input_to_file_id(db, file, &eager.arg) {
-                                break it;
-                            }
-                        }
+                    if loc.def.is_include()
+                        && let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind
+                        && let Ok(it) = include_input_to_file_id(db, file, &eager.arg)
+                    {
+                        break it;
                     }
                     self = loc.kind.file_id();
                 }
@@ -648,12 +647,11 @@ impl MacroCallLoc {
         db: &dyn ExpandDatabase,
         macro_call_id: MacroCallId,
     ) -> Option<EditionedFileId> {
-        if self.def.is_include() {
-            if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind {
-                if let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg) {
-                    return Some(it);
-                }
-            }
+        if self.def.is_include()
+            && let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind
+            && let Ok(it) = include_input_to_file_id(db, macro_call_id, &eager.arg)
+        {
+            return Some(it);
         }
         None


@@ -273,16 +273,17 @@ fn convert_path(
     // Basically, even in rustc it is quite hacky:
     // https://github.com/rust-lang/rust/blob/614f273e9388ddd7804d5cbc80b8865068a3744e/src/librustc_resolve/macros.rs#L456
     // We follow what it did anyway :)
-    if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
-        if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            let syn_ctx = span_for_range(segment.syntax().text_range());
-            if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
-                if db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner {
-                    mod_path.kind = match resolve_crate_root(db, syn_ctx) {
-                        Some(crate_root) => PathKind::DollarCrate(crate_root),
-                        None => PathKind::Crate,
-                    }
-                }
+    if mod_path.segments.len() == 1
+        && mod_path.kind == PathKind::Plain
+        && let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast)
+    {
+        let syn_ctx = span_for_range(segment.syntax().text_range());
+        if let Some(macro_call_id) = syn_ctx.outer_expn(db)
+            && db.lookup_intern_macro_call(macro_call_id.into()).def.local_inner
+        {
+            mod_path.kind = match resolve_crate_root(db, syn_ctx) {
+                Some(crate_root) => PathKind::DollarCrate(crate_root),
+                None => PathKind::Crate,
+            }
         }
     }


@@ -197,10 +197,11 @@ pub(crate) fn deref_by_trait(
    // effectively bump the MSRV of rust-analyzer to 1.84 due to 1.83 and below lacking the
    // blanked impl on `Deref`.
    #[expect(clippy::overly_complex_bool_expr)]
-    if use_receiver_trait && false {
-        if let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) {
-            return Some(receiver);
-        }
+    if use_receiver_trait
+        && false
+        && let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate)
+    {
+        return Some(receiver);
    }
    // Old rustc versions might not have `Receiver` trait.
    // Fallback to `Deref` if they don't


@@ -309,11 +309,11 @@ impl TyBuilder<hir_def::AdtId> {
         if let Some(defaults) = defaults.get(self.vec.len()..) {
             for default_ty in defaults {
                 // NOTE(skip_binders): we only check if the arg type is error type.
-                if let Some(x) = default_ty.skip_binders().ty(Interner) {
-                    if x.is_unknown() {
-                        self.vec.push(fallback().cast(Interner));
-                        continue;
-                    }
-                }
+                if let Some(x) = default_ty.skip_binders().ty(Interner)
+                    && x.is_unknown()
+                {
+                    self.vec.push(fallback().cast(Interner));
+                    continue;
+                }
                 // Each default can only depend on the previous parameters.
                 self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner));


@ -83,34 +83,34 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
} }
fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> { fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) { if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner)
if let hir_def::AdtId::EnumId(e) = id.0 { && let hir_def::AdtId::EnumId(e) = id.0
let enum_data = self.db.enum_signature(e); {
let ty = enum_data.repr.unwrap_or_default().discr_type(); let enum_data = self.db.enum_signature(e);
return chalk_ir::TyKind::Scalar(match ty { let ty = enum_data.repr.unwrap_or_default().discr_type();
hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { return chalk_ir::TyKind::Scalar(match ty {
true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed {
false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize),
}, false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize),
hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { },
true => chalk_ir::Scalar::Int(match size { hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed {
hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, true => chalk_ir::Scalar::Int(match size {
hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8,
hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16,
hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32,
hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64,
}), hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128,
false => chalk_ir::Scalar::Uint(match size { }),
hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, false => chalk_ir::Scalar::Uint(match size {
hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8,
hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16,
hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32,
hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64,
}), hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128,
}, }),
}) },
.intern(Interner); })
} .intern(Interner);
} }
chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner) chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner)
} }
@ -142,10 +142,10 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
) -> Option<chalk_ir::TyVariableKind> { ) -> Option<chalk_ir::TyVariableKind> {
if let TyKind::BoundVar(bv) = ty.kind(Interner) { if let TyKind::BoundVar(bv) = ty.kind(Interner) {
let binders = binders.as_slice(Interner); let binders = binders.as_slice(Interner);
if bv.debruijn == DebruijnIndex::INNERMOST { if bv.debruijn == DebruijnIndex::INNERMOST
if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind { && let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind
return Some(tk); {
} return Some(tk);
} }
} }
None None


@@ -342,10 +342,10 @@ pub(crate) fn eval_to_const(
             return c;
         }
     }
-    if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) {
-        if let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None) {
-            return result;
-        }
-    }
+    if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr)
+        && let Ok((Ok(result), _)) = interpret_mir(db, Arc::new(mir_body), true, None)
+    {
+        return result;
+    }
     unknown_const(infer[expr].clone())
 }


@@ -657,10 +657,10 @@ impl<'a> DeclValidator<'a> {
     }
     fn is_trait_impl_container(&self, container_id: ItemContainerId) -> bool {
-        if let ItemContainerId::ImplId(impl_id) = container_id {
-            if self.db.impl_trait(impl_id).is_some() {
-                return true;
-            }
-        }
+        if let ItemContainerId::ImplId(impl_id) = container_id
+            && self.db.impl_trait(impl_id).is_some()
+        {
+            return true;
+        }
         false
     }


@@ -528,15 +528,15 @@ impl FilterMapNextChecker {
            return None;
        }
-        if *function_id == self.next_function_id? {
-            if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id {
-                let is_dyn_trait = self
-                    .prev_receiver_ty
-                    .as_ref()
-                    .is_some_and(|it| it.strip_references().dyn_trait().is_some());
-                if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait {
-                    return Some(());
-                }
-            }
+        if *function_id == self.next_function_id?
+            && let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id
+        {
+            let is_dyn_trait = self
+                .prev_receiver_ty
+                .as_ref()
+                .is_some_and(|it| it.strip_references().dyn_trait().is_some());
+            if *receiver_expr_id == prev_filter_map_expr_id && !is_dyn_trait {
+                return Some(());
+            }
        }


@@ -382,10 +382,10 @@ impl HirDisplay for Pat {
                 let subpats = (0..num_fields).map(|i| {
                     WriteWith(move |f| {
                         let fid = LocalFieldId::from_raw((i as u32).into());
-                        if let Some(p) = subpatterns.get(i) {
-                            if p.field == fid {
-                                return p.pattern.hir_fmt(f);
-                            }
-                        }
+                        if let Some(p) = subpatterns.get(i)
+                            && p.field == fid
+                        {
+                            return p.pattern.hir_fmt(f);
+                        }
                         if let Some(p) = subpatterns.iter().find(|p| p.field == fid) {
                             p.pattern.hir_fmt(f)


@@ -272,10 +272,10 @@ impl<'db> UnsafeVisitor<'db> {
                 if let Some(func) = callee.as_fn_def(self.db) {
                     self.check_call(current, func);
                 }
-                if let TyKind::Function(fn_ptr) = callee.kind(Interner) {
-                    if fn_ptr.sig.safety == chalk_ir::Safety::Unsafe {
-                        self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall);
-                    }
-                }
+                if let TyKind::Function(fn_ptr) = callee.kind(Interner)
+                    && fn_ptr.sig.safety == chalk_ir::Safety::Unsafe
+                {
+                    self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall);
+                }
             }
             Expr::Path(path) => {
@@ -346,12 +346,11 @@ impl<'db> UnsafeVisitor<'db> {
             Expr::Cast { .. } => self.inside_assignment = inside_assignment,
             Expr::Field { .. } => {
                 self.inside_assignment = inside_assignment;
-                if !inside_assignment {
-                    if let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) =
-                        self.infer.field_resolution(current)
-                    {
-                        self.on_unsafe_op(current.into(), UnsafetyReason::UnionField);
-                    }
+                if !inside_assignment
+                    && let Some(Either::Left(FieldId { parent: VariantId::UnionId(_), .. })) =
+                        self.infer.field_resolution(current)
+                {
+                    self.on_unsafe_op(current.into(), UnsafetyReason::UnionField);
                 }
             }
             Expr::Unsafe { statements, .. } => {


@ -608,48 +608,46 @@ impl HirDisplay for ProjectionTy {
// if we are projection on a type parameter, check if the projection target has bounds // if we are projection on a type parameter, check if the projection target has bounds
// itself, if so, we render them directly as `impl Bound` instead of the less useful // itself, if so, we render them directly as `impl Bound` instead of the less useful
// `<Param as Trait>::Assoc` // `<Param as Trait>::Assoc`
if !f.display_kind.is_source_code() { if !f.display_kind.is_source_code()
if let TyKind::Placeholder(idx) = self_ty.kind(Interner) { && let TyKind::Placeholder(idx) = self_ty.kind(Interner)
if !f.bounds_formatting_ctx.contains(self) { && !f.bounds_formatting_ctx.contains(self)
let db = f.db; {
let id = from_placeholder_idx(db, *idx); let db = f.db;
let generics = generics(db, id.parent); let id = from_placeholder_idx(db, *idx);
let generics = generics(db, id.parent);
let substs = generics.placeholder_subst(db); let substs = generics.placeholder_subst(db);
let bounds = db let bounds = db
.generic_predicates(id.parent) .generic_predicates(id.parent)
.iter() .iter()
.map(|pred| pred.clone().substitute(Interner, &substs)) .map(|pred| pred.clone().substitute(Interner, &substs))
.filter(|wc| { .filter(|wc| {
let ty = match wc.skip_binders() { let ty = match wc.skip_binders() {
WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), WhereClause::Implemented(tr) => tr.self_type_parameter(Interner),
WhereClause::TypeOutlives(t) => t.ty.clone(), WhereClause::TypeOutlives(t) => t.ty.clone(),
// We shouldn't be here if these exist // We shouldn't be here if these exist
WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => {
return false; return false;
} }
}; };
let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else {
return false; return false;
}; };
proj == self proj == self
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if !bounds.is_empty() { if !bounds.is_empty() {
return f.format_bounds_with(self.clone(), |f| { return f.format_bounds_with(self.clone(), |f| {
write_bounds_like_dyn_trait_with_prefix( write_bounds_like_dyn_trait_with_prefix(
f, f,
"impl", "impl",
Either::Left( Either::Left(
&TyKind::Alias(AliasTy::Projection(self.clone())) &TyKind::Alias(AliasTy::Projection(self.clone())).intern(Interner),
.intern(Interner), ),
), &bounds,
&bounds, SizedByDefault::NotSized,
SizedByDefault::NotSized, )
) });
});
}
}
} }
} }
@ -1860,18 +1858,13 @@ fn write_bounds_like_dyn_trait(
write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?;
f.end_location_link(); f.end_location_link();
if is_fn_trait { if is_fn_trait {
if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) { if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner)
if let Some(args) = && let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple()) params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
{ {
write!(f, "(")?; write!(f, "(")?;
hir_fmt_generic_arguments( hir_fmt_generic_arguments(f, args.as_slice(Interner), self_.ty(Interner))?;
f, write!(f, ")")?;
args.as_slice(Interner),
self_.ty(Interner),
)?;
write!(f, ")")?;
}
} }
} else { } else {
let params = generic_args_sans_defaults( let params = generic_args_sans_defaults(
@ -1879,13 +1872,13 @@ fn write_bounds_like_dyn_trait(
Some(trait_.into()), Some(trait_.into()),
trait_ref.substitution.as_slice(Interner), trait_ref.substitution.as_slice(Interner),
); );
if let [self_, params @ ..] = params { if let [self_, params @ ..] = params
if !params.is_empty() { && !params.is_empty()
write!(f, "<")?; {
hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; write!(f, "<")?;
// there might be assoc type bindings, so we leave the angle brackets open hir_fmt_generic_arguments(f, params, self_.ty(Interner))?;
angle_open = true; // there might be assoc type bindings, so we leave the angle brackets open
} angle_open = true;
} }
} }
} }
@ -2443,11 +2436,11 @@ impl HirDisplayWithExpressionStore for Path {
generic_args.args[0].hir_fmt(f, store)?; generic_args.args[0].hir_fmt(f, store)?;
} }
} }
if let Some(ret) = generic_args.bindings[0].type_ref { if let Some(ret) = generic_args.bindings[0].type_ref
if !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty()) { && !matches!(&store[ret], TypeRef::Tuple(v) if v.is_empty())
write!(f, " -> ")?; {
ret.hir_fmt(f, store)?; write!(f, " -> ")?;
} ret.hir_fmt(f, store)?;
} }
} }
hir_def::expr_store::path::GenericArgsParentheses::No => { hir_def::expr_store::path::GenericArgsParentheses::No => {


@@ -136,16 +136,15 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
     let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone());
     elaborate_clause_supertraits(db, predicates).any(|pred| match pred {
         WhereClause::Implemented(trait_ref) => {
-            if from_chalk_trait_id(trait_ref.trait_id) == sized {
-                if let TyKind::BoundVar(it) =
-                    *trait_ref.self_type_parameter(Interner).kind(Interner)
-                {
-                    // Since `generic_predicates` is `Binder<Binder<..>>`, the `DebrujinIndex` of
-                    // self-parameter is `1`
-                    return it
-                        .index_if_bound_at(DebruijnIndex::ONE)
-                        .is_some_and(|idx| idx == trait_self_param_idx);
-                }
+            if from_chalk_trait_id(trait_ref.trait_id) == sized
+                && let TyKind::BoundVar(it) =
+                    *trait_ref.self_type_parameter(Interner).kind(Interner)
+            {
+                // Since `generic_predicates` is `Binder<Binder<..>>`, the `DebrujinIndex` of
+                // self-parameter is `1`
+                return it
+                    .index_if_bound_at(DebruijnIndex::ONE)
+                    .is_some_and(|idx| idx == trait_self_param_idx);
             }
             false
         }
@@ -401,10 +400,10 @@ where
         cb(MethodViolationCode::ReferencesSelfOutput)?;
     }
-    if !func_data.is_async() {
-        if let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) {
-            cb(mvc)?;
-        }
-    }
+    if !func_data.is_async()
+        && let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig)
+    {
+        cb(mvc)?;
+    }
     let generic_params = db.generic_params(func.into());


@ -902,12 +902,12 @@ impl<'db> InferenceContext<'db> {
return false; return false;
} }
if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic { if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic
if let Some(ty) = field_with_same_name { && let Some(ty) = field_with_same_name
*ty = table.resolve_completely(ty.clone()); {
if ty.contains_unknown() { *ty = table.resolve_completely(ty.clone());
*field_with_same_name = None; if ty.contains_unknown() {
} *field_with_same_name = None;
} }
} }
} }
@ -1010,12 +1010,12 @@ impl<'db> InferenceContext<'db> {
param_tys.push(va_list_ty); param_tys.push(va_list_ty);
} }
let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var())); let mut param_tys = param_tys.into_iter().chain(iter::repeat(self.table.new_type_var()));
if let Some(self_param) = self.body.self_param { if let Some(self_param) = self.body.self_param
if let Some(ty) = param_tys.next() { && let Some(ty) = param_tys.next()
let ty = self.insert_type_vars(ty); {
let ty = self.normalize_associated_types_in(ty); let ty = self.insert_type_vars(ty);
self.write_binding_ty(self_param, ty); let ty = self.normalize_associated_types_in(ty);
} self.write_binding_ty(self_param, ty);
} }
let mut tait_candidates = FxHashSet::default(); let mut tait_candidates = FxHashSet::default();
for (ty, pat) in param_tys.zip(&*self.body.params) { for (ty, pat) in param_tys.zip(&*self.body.params) {
@ -1199,20 +1199,19 @@ impl<'db> InferenceContext<'db> {
) -> std::ops::ControlFlow<Self::BreakTy> { ) -> std::ops::ControlFlow<Self::BreakTy> {
let ty = self.table.resolve_ty_shallow(ty); let ty = self.table.resolve_ty_shallow(ty);
if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { if let TyKind::OpaqueType(id, _) = ty.kind(Interner)
if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = && let ImplTraitId::TypeAliasImplTrait(alias_id, _) =
self.db.lookup_intern_impl_trait_id((*id).into()) self.db.lookup_intern_impl_trait_id((*id).into())
{ {
let loc = self.db.lookup_intern_type_alias(alias_id); let loc = self.db.lookup_intern_type_alias(alias_id);
match loc.container { match loc.container {
ItemContainerId::ImplId(impl_id) => { ItemContainerId::ImplId(impl_id) => {
self.assocs.insert(*id, (impl_id, ty.clone())); self.assocs.insert(*id, (impl_id, ty.clone()));
}
ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => {
self.non_assocs.insert(*id, ty.clone());
}
_ => {}
} }
ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => {
self.non_assocs.insert(*id, ty.clone());
}
_ => {}
} }
} }


@ -233,26 +233,25 @@ impl CastCheck {
F: FnMut(ExprId, Vec<Adjustment>), F: FnMut(ExprId, Vec<Adjustment>),
{ {
// Mutability order is opposite to rustc. `Mut < Not` // Mutability order is opposite to rustc. `Mut < Not`
if m_expr <= m_cast { if m_expr <= m_cast
if let TyKind::Array(ety, _) = t_expr.kind(Interner) { && let TyKind::Array(ety, _) = t_expr.kind(Interner)
// Coerce to a raw pointer so that we generate RawPtr in MIR. {
let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner); // Coerce to a raw pointer so that we generate RawPtr in MIR.
if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner);
{ if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type, CoerceNever::Yes) {
apply_adjustments(self.source_expr, adj); apply_adjustments(self.source_expr, adj);
} else { } else {
never!( never!(
"could not cast from reference to array to pointer to array ({:?} to {:?})", "could not cast from reference to array to pointer to array ({:?} to {:?})",
self.expr_ty, self.expr_ty,
array_ptr_type array_ptr_type
); );
} }
// This is a less strict condition than rustc's `demand_eqtype`, // This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive // but false negative is better than false positive
if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() { if table.coerce(ety, t_cast, CoerceNever::Yes).is_ok() {
return Ok(()); return Ok(());
}
} }
} }


@ -176,12 +176,12 @@ impl InferenceContext<'_> {
} }
// Deduction based on the expected `dyn Fn` is done separately. // Deduction based on the expected `dyn Fn` is done separately.
if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) { if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner)
if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) { && let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty)
let expected_sig_ty = TyKind::Function(sig).intern(Interner); {
let expected_sig_ty = TyKind::Function(sig).intern(Interner);
self.unify(sig_ty, &expected_sig_ty); self.unify(sig_ty, &expected_sig_ty);
}
} }
} }
@ -208,14 +208,13 @@ impl InferenceContext<'_> {
alias: AliasTy::Projection(projection_ty), alias: AliasTy::Projection(projection_ty),
ty: projected_ty, ty: projected_ty,
}) = bound.skip_binders() }) = bound.skip_binders()
{ && let Some(sig) = self.deduce_sig_from_projection(
if let Some(sig) = self.deduce_sig_from_projection(
closure_kind, closure_kind,
projection_ty, projection_ty,
projected_ty, projected_ty,
) { )
return Some(sig); {
} return Some(sig);
} }
None None
}); });
@ -254,55 +253,44 @@ impl InferenceContext<'_> {
let mut expected_kind = None; let mut expected_kind = None;
for clause in elaborate_clause_supertraits(self.db, clauses.rev()) { for clause in elaborate_clause_supertraits(self.db, clauses.rev()) {
if expected_sig.is_none() { if expected_sig.is_none()
if let WhereClause::AliasEq(AliasEq { && let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
alias: AliasTy::Projection(projection), &clause
ty, {
}) = &clause let inferred_sig = self.deduce_sig_from_projection(closure_kind, projection, ty);
{ // Make sure that we didn't infer a signature that mentions itself.
let inferred_sig = // This can happen when we elaborate certain supertrait bounds that
self.deduce_sig_from_projection(closure_kind, projection, ty); // mention projections containing the `Self` type. See rust-lang/rust#105401.
// Make sure that we didn't infer a signature that mentions itself. struct MentionsTy<'a> {
// This can happen when we elaborate certain supertrait bounds that expected_ty: &'a Ty,
// mention projections containing the `Self` type. See rust-lang/rust#105401. }
struct MentionsTy<'a> { impl TypeVisitor<Interner> for MentionsTy<'_> {
expected_ty: &'a Ty, type BreakTy = ();
fn interner(&self) -> Interner {
Interner
} }
impl TypeVisitor<Interner> for MentionsTy<'_> {
type BreakTy = ();
fn interner(&self) -> Interner { fn as_dyn(
Interner &mut self,
} ) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy>
fn as_dyn(
&mut self,
) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy>
{
self
}
fn visit_ty(
&mut self,
t: &Ty,
db: chalk_ir::DebruijnIndex,
) -> ControlFlow<()> {
if t == self.expected_ty {
ControlFlow::Break(())
} else {
t.super_visit_with(self, db)
}
}
}
if inferred_sig
.visit_with(
&mut MentionsTy { expected_ty },
chalk_ir::DebruijnIndex::INNERMOST,
)
.is_continue()
{ {
expected_sig = inferred_sig; self
} }
fn visit_ty(&mut self, t: &Ty, db: chalk_ir::DebruijnIndex) -> ControlFlow<()> {
if t == self.expected_ty {
ControlFlow::Break(())
} else {
t.super_visit_with(self, db)
}
}
}
if inferred_sig
.visit_with(&mut MentionsTy { expected_ty }, chalk_ir::DebruijnIndex::INNERMOST)
.is_continue()
{
expected_sig = inferred_sig;
} }
} }
@ -617,11 +605,10 @@ impl HirPlace {
if let CaptureKind::ByRef(BorrowKind::Mut { if let CaptureKind::ByRef(BorrowKind::Mut {
kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow, kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
}) = current_capture }) = current_capture
&& self.projections[len..].contains(&ProjectionElem::Deref)
{ {
if self.projections[len..].contains(&ProjectionElem::Deref) { current_capture =
current_capture = CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
}
} }
current_capture current_capture
} }
@ -1076,12 +1063,11 @@ impl InferenceContext<'_> {
Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
}; };
if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) { if let Some(place) = self.place_of_expr_without_adjust(tgt_expr)
if let Some(place) = && let Some(place) =
apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
{ {
self.add_capture(place, capture_kind); self.add_capture(place, capture_kind);
}
} }
self.walk_expr_with_adjust(tgt_expr, rest); self.walk_expr_with_adjust(tgt_expr, rest);
} }
@ -1169,15 +1155,15 @@ impl InferenceContext<'_> {
} }
} }
self.walk_expr(*expr); self.walk_expr(*expr);
if let Some(discr_place) = self.place_of_expr(*expr) { if let Some(discr_place) = self.place_of_expr(*expr)
if self.is_upvar(&discr_place) { && self.is_upvar(&discr_place)
let mut capture_mode = None; {
for arm in arms.iter() { let mut capture_mode = None;
self.walk_pat(&mut capture_mode, arm.pat); for arm in arms.iter() {
} self.walk_pat(&mut capture_mode, arm.pat);
if let Some(c) = capture_mode { }
self.push_capture(discr_place, c); if let Some(c) = capture_mode {
} self.push_capture(discr_place, c);
} }
} }
} }
@@ -1209,13 +1195,11 @@ impl InferenceContext<'_> {
let mutability = 'b: { let mutability = 'b: {
if let Some(deref_trait) = if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
{ && let Some(deref_fn) = deref_trait
if let Some(deref_fn) = deref_trait
.trait_items(self.db) .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut)) .method_by_name(&Name::new_symbol_root(sym::deref_mut))
{ {
break 'b deref_fn == f; break 'b deref_fn == f;
}
} }
false false
}; };
@@ -1405,10 +1389,10 @@ impl InferenceContext<'_> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None; let mut ty = None;
if let Some(it) = self.result.expr_adjustments.get(&e) { if let Some(it) = self.result.expr_adjustments.get(&e)
if let Some(it) = it.last() { && let Some(it) = it.last()
ty = Some(it.target.clone()); {
} ty = Some(it.target.clone());
} }
ty.unwrap_or_else(|| self.expr_ty(e)) ty.unwrap_or_else(|| self.expr_ty(e))
} }
@@ -1793,10 +1777,10 @@ impl InferenceContext<'_> {
} }
pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) { pub(super) fn add_current_closure_dependency(&mut self, dep: ClosureId) {
if let Some(c) = self.current_closure { if let Some(c) = self.current_closure
if !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep) { && !dep_creates_cycle(&self.closure_dependencies, &mut FxHashSet::default(), c, dep)
self.closure_dependencies.entry(c).or_default().push(dep); {
} self.closure_dependencies.entry(c).or_default().push(dep);
} }
fn dep_creates_cycle( fn dep_creates_cycle(

View file
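
Nearly every hunk in this commit has the same shape: a nested `if let` (or an `if let` wrapping a plain `if`) is collapsed into a single `if` with a let-chain, removing one level of indentation without changing behavior. A stand-alone sketch of the rewrite, using hypothetical names rather than code from this repository, and assuming edition 2024 where let-chains are stable (Rust 1.88 and later); the likely driver is a clippy lint such as `collapsible_if`:

    // Before the fix: the nested form clippy flags.
    fn first_even(values: &[Option<u32>]) -> Option<u32> {
        for value in values {
            if let Some(n) = value {
                if n % 2 == 0 {
                    return Some(*n);
                }
            }
        }
        None
    }

    // After `cargo clippy --fix`: one `if` with a let-chain, same behavior.
    fn first_even_chained(values: &[Option<u32>]) -> Option<u32> {
        for value in values {
            if let Some(n) = value
                && n % 2 == 0
            {
                return Some(*n);
            }
        }
        None
    }

The hunks below repeat this pattern file by file; only the conditions being chained differ.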

@@ -164,14 +164,14 @@ impl CoerceMany {
// - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335) // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335)
// First try to coerce the new expression to the type of the previous ones, // First try to coerce the new expression to the type of the previous ones,
// but only if the new expression has no coercion already applied to it. // but only if the new expression has no coercion already applied to it.
if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) { if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr))
if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) { && let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes)
self.final_ty = Some(res); {
if let Some(expr) = expr { self.final_ty = Some(res);
self.expressions.push(expr); if let Some(expr) = expr {
} self.expressions.push(expr);
return;
} }
return;
} }
if let Ok((adjustments, res)) = if let Ok((adjustments, res)) =
@@ -322,18 +322,13 @@ impl InferenceTable<'_> {
// If we are coercing into a TAIT, coerce into its proxy inference var, instead. // If we are coercing into a TAIT, coerce into its proxy inference var, instead.
let mut to_ty = to_ty; let mut to_ty = to_ty;
let _to; let _to;
if let Some(tait_table) = &self.tait_coercion_table { if let Some(tait_table) = &self.tait_coercion_table
if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { && let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner)
if !matches!( && !matches!(from_ty.kind(Interner), TyKind::InferenceVar(..) | TyKind::OpaqueType(..))
from_ty.kind(Interner), && let Some(ty) = tait_table.get(opaque_ty_id)
TyKind::InferenceVar(..) | TyKind::OpaqueType(..) {
) { _to = ty.clone();
if let Some(ty) = tait_table.get(opaque_ty_id) { to_ty = &_to;
_to = ty.clone();
to_ty = &_to;
}
}
}
} }
// Consider coercing the subtype to a DST // Consider coercing the subtype to a DST
@@ -594,14 +589,13 @@ impl InferenceTable<'_> {
F: FnOnce(Ty) -> Vec<Adjustment>, F: FnOnce(Ty) -> Vec<Adjustment>,
G: FnOnce(Ty) -> Vec<Adjustment>, G: FnOnce(Ty) -> Vec<Adjustment>,
{ {
if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) { if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner)
if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) = && let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
(from_fn_ptr.sig.safety, to_fn_ptr.sig.safety) (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
{ {
let from_unsafe = let from_unsafe =
TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner); TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
return self.unify_and(&from_unsafe, to_ty, to_unsafe); return self.unify_and(&from_unsafe, to_ty, to_unsafe);
}
} }
self.unify_and(&from_ty, to_ty, normal) self.unify_and(&from_ty, to_ty, normal)
} }

View file

@@ -653,19 +653,18 @@ impl InferenceContext<'_> {
// FIXME: Note down method resolution her // FIXME: Note down method resolution her
match op { match op {
UnaryOp::Deref => { UnaryOp::Deref => {
if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) { if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref)
if let Some(deref_fn) = deref_trait && let Some(deref_fn) = deref_trait
.trait_items(self.db) .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref)) .method_by_name(&Name::new_symbol_root(sym::deref))
{ {
// FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
// the mutability is not wrong, and will be fixed in `self.infer_mut`). // the mutability is not wrong, and will be fixed in `self.infer_mut`).
self.write_method_resolution( self.write_method_resolution(
tgt_expr, tgt_expr,
deref_fn, deref_fn,
Substitution::empty(Interner), Substitution::empty(Interner),
); );
}
} }
if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) { if let Some(derefed) = builtin_deref(self.table.db, &inner_ty, true) {
self.resolve_ty_shallow(derefed) self.resolve_ty_shallow(derefed)
@@ -1387,28 +1386,28 @@ impl InferenceContext<'_> {
let ret_ty = match method_ty.callable_sig(self.db) { let ret_ty = match method_ty.callable_sig(self.db) {
Some(sig) => { Some(sig) => {
let p_left = &sig.params()[0]; let p_left = &sig.params()[0];
if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) { if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. })
if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) { && let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner)
self.write_expr_adj( {
lhs, self.write_expr_adj(
Box::new([Adjustment { lhs,
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), Box::new([Adjustment {
target: p_left.clone(), kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
}]), target: p_left.clone(),
); }]),
} );
} }
let p_right = &sig.params()[1]; let p_right = &sig.params()[1];
if matches!(op, BinaryOp::CmpOp(..)) { if matches!(op, BinaryOp::CmpOp(..))
if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) { && let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner)
self.write_expr_adj( {
rhs, self.write_expr_adj(
Box::new([Adjustment { rhs,
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), Box::new([Adjustment {
target: p_right.clone(), kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
}]), target: p_right.clone(),
); }]),
} );
} }
sig.ret().clone() sig.ret().clone()
} }
@@ -1664,14 +1663,12 @@ impl InferenceContext<'_> {
Some((ty, field_id, adjustments, is_public)) => { Some((ty, field_id, adjustments, is_public)) => {
self.write_expr_adj(receiver, adjustments.into_boxed_slice()); self.write_expr_adj(receiver, adjustments.into_boxed_slice());
self.result.field_resolutions.insert(tgt_expr, field_id); self.result.field_resolutions.insert(tgt_expr, field_id);
if !is_public { if !is_public && let Either::Left(field) = field_id {
if let Either::Left(field) = field_id { // FIXME: Merge this diagnostic into UnresolvedField?
// FIXME: Merge this diagnostic into UnresolvedField? self.push_diagnostic(InferenceDiagnostic::PrivateField {
self.push_diagnostic(InferenceDiagnostic::PrivateField { expr: tgt_expr,
expr: tgt_expr, field,
field, });
});
}
} }
ty ty
} }

View file

@@ -124,53 +124,41 @@ impl InferenceContext<'_> {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
} }
&Expr::Index { base, index } => { &Expr::Index { base, index } => {
if mutability == Mutability::Mut { if mutability == Mutability::Mut
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { && let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
if let Some(index_trait) = && let Some(index_trait) =
LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate) LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
{ && let Some(index_fn) = index_trait
if let Some(index_fn) = index_trait .trait_items(self.db)
.trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::index_mut))
.method_by_name(&Name::new_symbol_root(sym::index_mut)) {
{ *f = index_fn;
*f = index_fn; let mut base_ty = None;
let mut base_ty = None; let base_adjustments =
let base_adjustments = self self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut());
.result if let Some(Adjustment {
.expr_adjustments kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
.get_mut(&base) target,
.and_then(|it| it.last_mut()); }) = base_adjustments
if let Some(Adjustment { {
kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), if let TyKind::Ref(_, _, ty) = target.kind(Interner) {
target, base_ty = Some(ty.clone());
}) = base_adjustments
{
if let TyKind::Ref(_, _, ty) = target.kind(Interner) {
base_ty = Some(ty.clone());
}
*mutability = Mutability::Mut;
}
// Apply `IndexMut` obligation for non-assignee expr
if let Some(base_ty) = base_ty {
let index_ty =
if let Some(ty) = self.result.type_of_expr.get(index) {
ty.clone()
} else {
self.infer_expr(
index,
&Expectation::none(),
ExprIsRead::Yes,
)
};
let trait_ref = TyBuilder::trait_ref(self.db, index_trait)
.push(base_ty)
.fill(|_| index_ty.clone().cast(Interner))
.build();
self.push_obligation(trait_ref.cast(Interner));
}
}
} }
*mutability = Mutability::Mut;
}
// Apply `IndexMut` obligation for non-assignee expr
if let Some(base_ty) = base_ty {
let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) {
ty.clone()
} else {
self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes)
};
let trait_ref = TyBuilder::trait_ref(self.db, index_trait)
.push(base_ty)
.fill(|_| index_ty.clone().cast(Interner))
.build();
self.push_obligation(trait_ref.cast(Interner));
} }
} }
self.infer_mut_expr(base, mutability); self.infer_mut_expr(base, mutability);
@@ -178,28 +166,23 @@ impl InferenceContext<'_> {
} }
Expr::UnaryOp { expr, op: UnaryOp::Deref } => { Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
let mut mutability = mutability; let mut mutability = mutability;
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
if mutability == Mutability::Mut { && mutability == Mutability::Mut
if let Some(deref_trait) = && let Some(deref_trait) =
LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate) LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate)
{ {
let ty = self.result.type_of_expr.get(*expr); let ty = self.result.type_of_expr.get(*expr);
let is_mut_ptr = ty.is_some_and(|ty| { let is_mut_ptr = ty.is_some_and(|ty| {
let ty = self.table.resolve_ty_shallow(ty); let ty = self.table.resolve_ty_shallow(ty);
matches!( matches!(ty.kind(Interner), chalk_ir::TyKind::Raw(Mutability::Mut, _))
ty.kind(Interner), });
chalk_ir::TyKind::Raw(Mutability::Mut, _) if is_mut_ptr {
) mutability = Mutability::Not;
}); } else if let Some(deref_fn) = deref_trait
if is_mut_ptr { .trait_items(self.db)
mutability = Mutability::Not; .method_by_name(&Name::new_symbol_root(sym::deref_mut))
} else if let Some(deref_fn) = deref_trait {
.trait_items(self.db) *f = deref_fn;
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
*f = deref_fn;
}
}
} }
} }
self.infer_mut_expr(*expr, mutability); self.infer_mut_expr(*expr, mutability);

View file

@@ -498,12 +498,12 @@ impl InferenceContext<'_> {
// If `expected` is an infer ty, we try to equate it to an array if the given pattern // If `expected` is an infer ty, we try to equate it to an array if the given pattern
// allows it. See issue #16609 // allows it. See issue #16609
if self.pat_is_irrefutable(decl) && expected.is_ty_var() { if self.pat_is_irrefutable(decl)
if let Some(resolved_array_ty) = && expected.is_ty_var()
&& let Some(resolved_array_ty) =
self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice) self.try_resolve_slice_ty_to_array_ty(prefix, suffix, slice)
{ {
self.unify(&expected, &resolved_array_ty); self.unify(&expected, &resolved_array_ty);
}
} }
let expected = self.resolve_ty_shallow(&expected); let expected = self.resolve_ty_shallow(&expected);
@@ -539,17 +539,16 @@ impl InferenceContext<'_> {
fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty { fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty {
// Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`. // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] { if let Expr::Literal(Literal::ByteString(_)) = self.body[expr]
if let Some((inner, ..)) = expected.as_reference() { && let Some((inner, ..)) = expected.as_reference()
let inner = self.resolve_ty_shallow(inner); {
if matches!(inner.kind(Interner), TyKind::Slice(_)) { let inner = self.resolve_ty_shallow(inner);
let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); if matches!(inner.kind(Interner), TyKind::Slice(_)) {
let slice_ty = TyKind::Slice(elem_ty).intern(Interner); let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
let ty = let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner); let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner);
self.write_expr_ty(expr, ty.clone()); self.write_expr_ty(expr, ty.clone());
return ty; return ty;
}
} }
} }

View file

@@ -830,10 +830,10 @@ fn named_associated_type_shorthand_candidates<R>(
let data = t.hir_trait_id().trait_items(db); let data = t.hir_trait_id().trait_items(db);
for (name, assoc_id) in &data.items { for (name, assoc_id) in &data.items {
if let AssocItemId::TypeAliasId(alias) = assoc_id { if let AssocItemId::TypeAliasId(alias) = assoc_id
if let Some(result) = cb(name, &t, *alias) { && let Some(result) = cb(name, &t, *alias)
return Some(result); {
} return Some(result);
} }
} }
None None

View file

@@ -360,15 +360,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
} }
} }
if let Some(enum_segment) = enum_segment { if let Some(enum_segment) = enum_segment
if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{ {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32, segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant, reason: GenericArgsProhibitedReason::EnumVariant,
}); });
}
} }
self.handle_type_ns_resolution(&resolution); self.handle_type_ns_resolution(&resolution);
@@ -417,15 +416,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
} }
} }
if let Some(enum_segment) = enum_segment { if let Some(enum_segment) = enum_segment
if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{ {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32, segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant, reason: GenericArgsProhibitedReason::EnumVariant,
}); });
}
} }
match &res { match &res {
@@ -576,13 +574,12 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
// This simplifies the code a bit. // This simplifies the code a bit.
let penultimate_idx = self.current_segment_idx.wrapping_sub(1); let penultimate_idx = self.current_segment_idx.wrapping_sub(1);
let penultimate = self.segments.get(penultimate_idx); let penultimate = self.segments.get(penultimate_idx);
if let Some(penultimate) = penultimate { if let Some(penultimate) = penultimate
if self.current_or_prev_segment.args_and_bindings.is_none() && self.current_or_prev_segment.args_and_bindings.is_none()
&& penultimate.args_and_bindings.is_some() && penultimate.args_and_bindings.is_some()
{ {
self.current_segment_idx = penultimate_idx; self.current_segment_idx = penultimate_idx;
self.current_or_prev_segment = penultimate; self.current_or_prev_segment = penultimate;
}
} }
var.lookup(self.ctx.db).parent.into() var.lookup(self.ctx.db).parent.into()
} }
@@ -607,37 +604,36 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
) -> Substitution { ) -> Substitution {
let mut lifetime_elision = self.ctx.lifetime_elision.clone(); let mut lifetime_elision = self.ctx.lifetime_elision.clone();
if let Some(args) = self.current_or_prev_segment.args_and_bindings { if let Some(args) = self.current_or_prev_segment.args_and_bindings
if args.parenthesized != GenericArgsParentheses::No { && args.parenthesized != GenericArgsParentheses::No
let prohibit_parens = match def { {
GenericDefId::TraitId(trait_) => { let prohibit_parens = match def {
// RTN is prohibited anyways if we got here. GenericDefId::TraitId(trait_) => {
let is_rtn = // RTN is prohibited anyways if we got here.
args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; let is_rtn = args.parenthesized == GenericArgsParentheses::ReturnTypeNotation;
let is_fn_trait = self let is_fn_trait = self
.ctx .ctx
.db .db
.trait_signature(trait_) .trait_signature(trait_)
.flags .flags
.contains(TraitFlags::RUSTC_PAREN_SUGAR); .contains(TraitFlags::RUSTC_PAREN_SUGAR);
is_rtn || !is_fn_trait is_rtn || !is_fn_trait
}
_ => true,
};
if prohibit_parens {
let segment = self.current_segment_u32();
self.on_diagnostic(
PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
);
return TyBuilder::unknown_subst(self.ctx.db, def);
} }
_ => true,
};
// `Fn()`-style generics are treated like functions for the purpose of lifetime elision. if prohibit_parens {
lifetime_elision = let segment = self.current_segment_u32();
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; self.on_diagnostic(
PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
);
return TyBuilder::unknown_subst(self.ctx.db, def);
} }
// `Fn()`-style generics are treated like functions for the purpose of lifetime elision.
lifetime_elision =
LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false };
} }
self.substs_from_args_and_bindings( self.substs_from_args_and_bindings(
@@ -753,18 +749,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
match param { match param {
GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner), GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner),
GenericParamDataRef::TypeParamData(param) => { GenericParamDataRef::TypeParamData(param) => {
if !infer_args && param.default.is_some() { if !infer_args
if let Some(default) = default() { && param.default.is_some()
return default; && let Some(default) = default()
} {
return default;
} }
TyKind::Error.intern(Interner).cast(Interner) TyKind::Error.intern(Interner).cast(Interner)
} }
GenericParamDataRef::ConstParamData(param) => { GenericParamDataRef::ConstParamData(param) => {
if !infer_args && param.default.is_some() { if !infer_args
if let Some(default) = default() { && param.default.is_some()
return default; && let Some(default) = default()
} {
return default;
} }
let GenericParamId::ConstParamId(const_id) = param_id else { let GenericParamId::ConstParamId(const_id) = param_id else {
unreachable!("non-const param ID for const param"); unreachable!("non-const param ID for const param");

View file

@@ -581,15 +581,15 @@ impl ReceiverAdjustments {
} }
if self.unsize_array { if self.unsize_array {
ty = 'it: { ty = 'it: {
if let TyKind::Ref(m, l, inner) = ty.kind(Interner) { if let TyKind::Ref(m, l, inner) = ty.kind(Interner)
if let TyKind::Array(inner, _) = inner.kind(Interner) { && let TyKind::Array(inner, _) = inner.kind(Interner)
break 'it TyKind::Ref( {
*m, break 'it TyKind::Ref(
l.clone(), *m,
TyKind::Slice(inner.clone()).intern(Interner), l.clone(),
) TyKind::Slice(inner.clone()).intern(Interner),
.intern(Interner); )
} .intern(Interner);
} }
// FIXME: report diagnostic if array unsizing happens without indirection. // FIXME: report diagnostic if array unsizing happens without indirection.
ty ty
@@ -1549,11 +1549,11 @@ fn is_valid_impl_method_candidate(
check_that!(receiver_ty.is_none()); check_that!(receiver_ty.is_none());
check_that!(name.is_none_or(|n| n == item_name)); check_that!(name.is_none_or(|n| n == item_name));
if let Some(from_module) = visible_from_module { if let Some(from_module) = visible_from_module
if !db.assoc_visibility(c.into()).is_visible_from(db, from_module) { && !db.assoc_visibility(c.into()).is_visible_from(db, from_module)
cov_mark::hit!(const_candidate_not_visible); {
return IsValidCandidate::NotVisible; cov_mark::hit!(const_candidate_not_visible);
} return IsValidCandidate::NotVisible;
} }
let self_ty_matches = table.run_in_snapshot(|table| { let self_ty_matches = table.run_in_snapshot(|table| {
let expected_self_ty = let expected_self_ty =
@@ -1638,11 +1638,11 @@ fn is_valid_impl_fn_candidate(
let db = table.db; let db = table.db;
let data = db.function_signature(fn_id); let data = db.function_signature(fn_id);
if let Some(from_module) = visible_from_module { if let Some(from_module) = visible_from_module
if !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module) { && !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module)
cov_mark::hit!(autoderef_candidate_not_visible); {
return IsValidCandidate::NotVisible; cov_mark::hit!(autoderef_candidate_not_visible);
} return IsValidCandidate::NotVisible;
} }
table.run_in_snapshot(|table| { table.run_in_snapshot(|table| {
let _p = tracing::info_span!("subst_for_def").entered(); let _p = tracing::info_span!("subst_for_def").entered();

View file
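
Several hunks also thread the new let-chains into a labeled block (the `ReceiverAdjustments` unsize step just above, and the `DerefMut` lookups that `break 'b deref_fn == f`), so a value is produced with an early `break 'label` instead of yet another `if let` level. A minimal sketch of that combination, with made-up types, again assuming edition 2024; labeled block breaks themselves have been stable since Rust 1.65:

    // Labeled block plus let-chain: compute a value, bailing out early with `break 'b`.
    fn describe(input: Option<&str>) -> String {
        let description = 'b: {
            if let Some(s) = input
                && let Some(first) = s.chars().next()
            {
                break 'b format!("starts with '{first}'");
            }
            "empty".to_owned()
        };
        description
    }

    fn main() {
        assert_eq!(describe(Some("rust")), "starts with 'r'");
        assert_eq!(describe(None), "empty");
    }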

@@ -559,10 +559,9 @@ fn mutability_of_locals(
}, },
p, p,
) = value ) = value
&& place_case(db, body, p) != ProjectionCase::Indirect
{ {
if place_case(db, body, p) != ProjectionCase::Indirect { push_mut_span(p.local, statement.span, &mut result);
push_mut_span(p.local, statement.span, &mut result);
}
} }
} }
StatementKind::FakeRead(p) => { StatementKind::FakeRead(p) => {

View file

@@ -1082,18 +1082,18 @@ impl Evaluator<'_> {
let stack_size = { let stack_size = {
let mut stack_ptr = self.stack.len(); let mut stack_ptr = self.stack.len();
for (id, it) in body.locals.iter() { for (id, it) in body.locals.iter() {
if id == return_slot() { if id == return_slot()
if let Some(destination) = destination { && let Some(destination) = destination
locals.ptr.insert(id, destination); {
continue; locals.ptr.insert(id, destination);
} continue;
} }
let (size, align) = self.size_align_of_sized( let (size, align) = self.size_align_of_sized(
&it.ty, &it.ty,
&locals, &locals,
"no unsized local in extending stack", "no unsized local in extending stack",
)?; )?;
while stack_ptr % align != 0 { while !stack_ptr.is_multiple_of(align) {
stack_ptr += 1; stack_ptr += 1;
} }
let my_ptr = stack_ptr; let my_ptr = stack_ptr;
@@ -1673,14 +1673,14 @@ impl Evaluator<'_> {
if let Some(it) = goal(kind) { if let Some(it) = goal(kind) {
return Ok(it); return Ok(it);
} }
if let TyKind::Adt(id, subst) = kind { if let TyKind::Adt(id, subst) = kind
if let AdtId::StructId(struct_id) = id.0 { && let AdtId::StructId(struct_id) = id.0
let field_types = self.db.field_types(struct_id.into()); {
if let Some(ty) = let field_types = self.db.field_types(struct_id.into());
field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst)) if let Some(ty) =
{ field_types.iter().last().map(|it| it.1.clone().substitute(Interner, subst))
return self.coerce_unsized_look_through_fields(&ty, goal); {
} return self.coerce_unsized_look_through_fields(&ty, goal);
} }
} }
Err(MirEvalError::CoerceUnsizedError(ty.clone())) Err(MirEvalError::CoerceUnsizedError(ty.clone()))
@@ -1778,17 +1778,15 @@ impl Evaluator<'_> {
locals: &Locals, locals: &Locals,
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> { ) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
let adt = it.adt_id(self.db); let adt = it.adt_id(self.db);
if let DefWithBodyId::VariantId(f) = locals.body.owner { if let DefWithBodyId::VariantId(f) = locals.body.owner
if let VariantId::EnumVariantId(it) = it { && let VariantId::EnumVariantId(it) = it
if let AdtId::EnumId(e) = adt { && let AdtId::EnumId(e) = adt
if f.lookup(self.db).parent == e { && f.lookup(self.db).parent == e
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and {
// infinite sized type errors) we use a dummy layout // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
let i = self.const_eval_discriminant(it)?; // infinite sized type errors) we use a dummy layout
return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); let i = self.const_eval_discriminant(it)?;
} return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
}
}
} }
let layout = self.layout_adt(adt, subst)?; let layout = self.layout_adt(adt, subst)?;
Ok(match &layout.variants { Ok(match &layout.variants {
@@ -1909,10 +1907,10 @@ impl Evaluator<'_> {
let name = const_id.name(self.db); let name = const_id.name(self.db);
MirEvalError::ConstEvalError(name, Box::new(e)) MirEvalError::ConstEvalError(name, Box::new(e))
})?; })?;
if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value { if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value
if let ConstScalar::Bytes(v, mm) = &c.interned { && let ConstScalar::Bytes(v, mm) = &c.interned
break 'b (v, mm); {
} break 'b (v, mm);
} }
not_supported!("unevaluatable constant"); not_supported!("unevaluatable constant");
} }
@@ -2055,14 +2053,13 @@ impl Evaluator<'_> {
.is_sized() .is_sized()
.then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize))); .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
} }
if let DefWithBodyId::VariantId(f) = locals.body.owner { if let DefWithBodyId::VariantId(f) = locals.body.owner
if let Some((AdtId::EnumId(e), _)) = ty.as_adt() { && let Some((AdtId::EnumId(e), _)) = ty.as_adt()
if f.lookup(self.db).parent == e { && f.lookup(self.db).parent == e
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and {
// infinite sized type errors) we use a dummy size // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
return Ok(Some((16, 16))); // infinite sized type errors) we use a dummy size
} return Ok(Some((16, 16)));
}
} }
let layout = self.layout(ty); let layout = self.layout(ty);
if self.assert_placeholder_ty_is_unused if self.assert_placeholder_ty_is_unused
@@ -2103,7 +2100,7 @@ impl Evaluator<'_> {
if !align.is_power_of_two() || align > 10000 { if !align.is_power_of_two() || align > 10000 {
return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid"))); return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid")));
} }
while self.heap.len() % align != 0 { while !self.heap.len().is_multiple_of(align) {
self.heap.push(0); self.heap.push(0);
} }
if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) { if size.checked_add(self.heap.len()).is_none_or(|x| x > self.memory_limit) {

View file
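
Besides the let-chain collapses, two hunks in the evaluator above swap a hand-rolled modulo test (`while x % align != 0 { x += 1; }`) for `is_multiple_of`, the std method on unsigned integers (stabilized in recent Rust, around 1.87, to the best of my knowledge), which states the intent directly. A small sketch of the padding idiom those loops implement; `align_up` is a hypothetical helper, not part of the interpreter:

    // Round `offset` up to the next multiple of `align`, one padding byte at a time,
    // mirroring how the evaluator pads its stack and heap before an allocation.
    fn align_up(mut offset: usize, align: usize) -> usize {
        assert!(align.is_power_of_two());
        // Pre-fix spelling: `while offset % align != 0 { offset += 1; }`
        while !offset.is_multiple_of(align) {
            offset += 1;
        }
        offset
    }

    fn main() {
        assert_eq!(align_up(13, 8), 16);
        assert_eq!(align_up(16, 8), 16);
    }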

@@ -119,25 +119,25 @@ impl Evaluator<'_> {
destination.write_from_bytes(self, &result)?; destination.write_from_bytes(self, &result)?;
return Ok(true); return Ok(true);
} }
if let ItemContainerId::TraitId(t) = def.lookup(self.db).container { if let ItemContainerId::TraitId(t) = def.lookup(self.db).container
if self.db.lang_attr(t.into()) == Some(LangItem::Clone) { && self.db.lang_attr(t.into()) == Some(LangItem::Clone)
let [self_ty] = generic_args.as_slice(Interner) else { {
not_supported!("wrong generic arg count for clone"); let [self_ty] = generic_args.as_slice(Interner) else {
}; not_supported!("wrong generic arg count for clone");
let Some(self_ty) = self_ty.ty(Interner) else { };
not_supported!("wrong generic arg kind for clone"); let Some(self_ty) = self_ty.ty(Interner) else {
}; not_supported!("wrong generic arg kind for clone");
// Clone has special impls for tuples and function pointers };
if matches!( // Clone has special impls for tuples and function pointers
self_ty.kind(Interner), if matches!(
TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..) self_ty.kind(Interner),
) { TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..)
self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?; ) {
return Ok(true); self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
} return Ok(true);
// Return early to prevent caching clone as non special fn.
return Ok(false);
} }
// Return early to prevent caching clone as non special fn.
return Ok(false);
} }
self.not_special_fn_cache.borrow_mut().insert(def); self.not_special_fn_cache.borrow_mut().insert(def);
Ok(false) Ok(false)
@@ -1256,23 +1256,22 @@ impl Evaluator<'_> {
let addr = tuple.interval.addr.offset(offset); let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?); args.push(IntervalAndTy::new(addr, field, self, locals)?);
} }
if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) { if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id)
if let Some(def) = target && let Some(def) = target
.trait_items(self.db) .trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::call_once)) .method_by_name(&Name::new_symbol_root(sym::call_once))
{ {
self.exec_fn_trait( self.exec_fn_trait(
def, def,
&args, &args,
// FIXME: wrong for manual impls of `FnOnce` // FIXME: wrong for manual impls of `FnOnce`
Substitution::empty(Interner), Substitution::empty(Interner),
locals, locals,
destination, destination,
None, None,
span, span,
)?; )?;
return Ok(true); return Ok(true);
}
} }
not_supported!("FnOnce was not available for executing const_eval_select"); not_supported!("FnOnce was not available for executing const_eval_select");
} }
@@ -1367,12 +1366,11 @@ impl Evaluator<'_> {
break; break;
} }
} }
if signed { if signed
if let Some((&l, &r)) = lhs.iter().zip(rhs).next_back() { && let Some((&l, &r)) = lhs.iter().zip(rhs).next_back()
if l != r { && l != r
result = (l as i8).cmp(&(r as i8)); {
} result = (l as i8).cmp(&(r as i8));
}
} }
if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) { if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
let ty = self.db.ty(e.into()); let ty = self.db.ty(e.into());

View file

@@ -114,12 +114,11 @@ impl Evaluator<'_> {
break; break;
} }
} }
if is_signed { if is_signed
if let Some((&l, &r)) = l.iter().zip(r).next_back() { && let Some((&l, &r)) = l.iter().zip(r).next_back()
if l != r { && l != r
result = (l as i8).cmp(&(r as i8)); {
} result = (l as i8).cmp(&(r as i8));
}
} }
let result = match result { let result = match result {
Ordering::Less => ["lt", "le", "ne"].contains(&name), Ordering::Less => ["lt", "le", "ne"].contains(&name),

View file

@@ -320,11 +320,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id: ExprId, expr_id: ExprId,
current: BasicBlockId, current: BasicBlockId,
) -> Result<Option<(Operand, BasicBlockId)>> { ) -> Result<Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id) { if !self.has_adjustments(expr_id)
if let Expr::Literal(l) = &self.body[expr_id] { && let Expr::Literal(l) = &self.body[expr_id]
let ty = self.expr_ty_without_adjust(expr_id); {
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); let ty = self.expr_ty_without_adjust(expr_id);
} return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
} }
let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else { let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else {
return Ok(None); return Ok(None);
@@ -1039,18 +1039,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
&& rhs_ty.is_scalar() && rhs_ty.is_scalar()
&& (lhs_ty == rhs_ty || builtin_inequal_impls) && (lhs_ty == rhs_ty || builtin_inequal_impls)
}; };
if !is_builtin { if !is_builtin
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { && let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id)
let func = Operand::from_fn(self.db, func_id, generic_args); {
return self.lower_call_and_args( let func = Operand::from_fn(self.db, func_id, generic_args);
func, return self.lower_call_and_args(
[*lhs, *rhs].into_iter(), func,
place, [*lhs, *rhs].into_iter(),
current, place,
self.is_uninhabited(expr_id), current,
expr_id.into(), self.is_uninhabited(expr_id),
); expr_id.into(),
} );
} }
if let hir_def::hir::BinaryOp::Assignment { op: Some(op) } = op { if let hir_def::hir::BinaryOp::Assignment { op: Some(op) } = op {
// last adjustment is `&mut` which we don't want it. // last adjustment is `&mut` which we don't want it.
@@ -1596,10 +1596,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None; let mut ty = None;
if let Some(it) = self.infer.expr_adjustments.get(&e) { if let Some(it) = self.infer.expr_adjustments.get(&e)
if let Some(it) = it.last() { && let Some(it) = it.last()
ty = Some(it.target.clone()); {
} ty = Some(it.target.clone());
} }
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
} }
@@ -1848,13 +1848,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.result.param_locals.extend(params.clone().map(|(it, ty)| { self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty }); let local_id = self.result.locals.alloc(Local { ty });
self.drop_scopes.last_mut().unwrap().locals.push(local_id); self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it] { if let Pat::Bind { id, subpat: None } = self.body[it]
if matches!( && matches!(
self.body[id].mode, self.body[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable BindingAnnotation::Unannotated | BindingAnnotation::Mutable
) { )
self.result.binding_locals.insert(id, local_id); {
} self.result.binding_locals.insert(id, local_id);
} }
local_id local_id
})); }));
@@ -1887,10 +1887,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
.into_iter() .into_iter()
.skip(base_param_count + self_binding.is_some() as usize); .skip(base_param_count + self_binding.is_some() as usize);
for ((param, _), local) in params.zip(local_params) { for ((param, _), local) in params.zip(local_params) {
if let Pat::Bind { id, .. } = self.body[param] { if let Pat::Bind { id, .. } = self.body[param]
if local == self.binding_local(id)? { && local == self.binding_local(id)?
continue; {
} continue;
} }
let r = self.pattern_match(current, None, local.into(), param)?; let r = self.pattern_match(current, None, local.into(), param)?;
if let Some(b) = r.1 { if let Some(b) = r.1 {

View file

@@ -189,17 +189,14 @@ impl MirLowerCtx<'_> {
self.expr_ty_without_adjust(expr_id), self.expr_ty_without_adjust(expr_id),
expr_id.into(), expr_id.into(),
'b: { 'b: {
if let Some((f, _)) = self.infer.method_resolution(expr_id) { if let Some((f, _)) = self.infer.method_resolution(expr_id)
if let Some(deref_trait) = && let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut)?.as_trait() self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
{ && let Some(deref_fn) = deref_trait
if let Some(deref_fn) = deref_trait .trait_items(self.db)
.trait_items(self.db) .method_by_name(&Name::new_symbol_root(sym::deref_mut))
.method_by_name(&Name::new_symbol_root(sym::deref_mut)) {
{ break 'b deref_fn == f;
break 'b deref_fn == f;
}
}
} }
false false
}, },

View file

@@ -317,27 +317,26 @@ impl MirLowerCtx<'_> {
(current, current_else) = (current, current_else) =
self.pattern_match_inner(current, current_else, next_place, pat, mode)?; self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
} }
if let &Some(slice) = slice { if let &Some(slice) = slice
if mode != MatchingMode::Check { && mode != MatchingMode::Check
if let Pat::Bind { id, subpat: _ } = self.body[slice] { && let Pat::Bind { id, subpat: _ } = self.body[slice]
let next_place = cond_place.project( {
ProjectionElem::Subslice { let next_place = cond_place.project(
from: prefix.len() as u64, ProjectionElem::Subslice {
to: suffix.len() as u64, from: prefix.len() as u64,
}, to: suffix.len() as u64,
&mut self.result.projection_store, },
); &mut self.result.projection_store,
let mode = self.infer.binding_modes[slice]; );
(current, current_else) = self.pattern_match_binding( let mode = self.infer.binding_modes[slice];
id, (current, current_else) = self.pattern_match_binding(
mode, id,
next_place, mode,
(slice).into(), next_place,
current, (slice).into(),
current_else, current,
)?; current_else,
} )?;
}
} }
for (i, &pat) in suffix.iter().enumerate() { for (i, &pat) in suffix.iter().enumerate() {
let next_place = cond_place.project( let next_place = cond_place.project(
@@ -391,10 +390,10 @@ impl MirLowerCtx<'_> {
return Ok((current, current_else)); return Ok((current, current_else));
} }
let (c, subst) = 'b: { let (c, subst) = 'b: {
if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) { if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern)
if let AssocItemId::ConstId(c) = x.0 { && let AssocItemId::ConstId(c) = x.0
break 'b (c, x.1); {
} break 'b (c, x.1);
} }
if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
break 'b (c, Substitution::empty(Interner)); break 'b (c, Substitution::empty(Interner));

View file

@@ -125,11 +125,10 @@ pub(crate) fn trait_solve_query(
alias: AliasTy::Projection(projection_ty), alias: AliasTy::Projection(projection_ty),
.. ..
}))) = &goal.value.goal.data(Interner) }))) = &goal.value.goal.data(Interner)
&& let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner)
{ {
if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
// Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible return Some(Solution::Ambig(Guidance::Unknown));
return Some(Solution::Ambig(Guidance::Unknown));
}
} }
// Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So

View file

@@ -333,13 +333,13 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
constant: Const, constant: Const,
_outer_binder: DebruijnIndex, _outer_binder: DebruijnIndex,
) -> Result<Const, Self::Error> { ) -> Result<Const, Self::Error> {
if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value { if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value
if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned { && let ConstScalar::UnevaluatedConst(id, subst) = &c.interned
if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) { {
return Ok(eval); if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
} else { return Ok(eval);
return Ok(unknown_const(constant.data(Interner).ty.clone())); } else {
} return Ok(unknown_const(constant.data(Interner).ty.clone()));
} }
} }
Ok(constant) Ok(constant)

View file

@@ -604,13 +604,13 @@ impl<'db> AnyDiagnostic<'db> {
} }
} }
BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr } => { BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr } => {
if let Ok(source_ptr) = source_map.expr_syntax(if_expr) { if let Ok(source_ptr) = source_map.expr_syntax(if_expr)
if let Some(ptr) = source_ptr.value.cast::<ast::IfExpr>() { && let Some(ptr) = source_ptr.value.cast::<ast::IfExpr>()
return Some( {
RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) } return Some(
.into(), RemoveUnnecessaryElse { if_expr: InFile::new(source_ptr.file_id, ptr) }
); .into(),
} );
} }
} }
} }

View file

@@ -1020,21 +1020,21 @@ fn emit_macro_def_diagnostics<'db>(
m: Macro, m: Macro,
) { ) {
let id = db.macro_def(m.id); let id = db.macro_def(m.id);
if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id)
if let Some(e) = expander.mac.err() { && let Some(e) = expander.mac.err()
let Some(ast) = id.ast_id().left() else { {
never!("declarative expander for non decl-macro: {:?}", e); let Some(ast) = id.ast_id().left() else {
return; never!("declarative expander for non decl-macro: {:?}", e);
}; return;
let krate = HasModule::krate(&m.id, db); };
let edition = krate.data(db).edition; let krate = HasModule::krate(&m.id, db);
emit_def_diagnostic_( let edition = krate.data(db).edition;
db, emit_def_diagnostic_(
acc, db,
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, acc,
edition, &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
); edition,
} );
} }
} }
@@ -2564,10 +2564,10 @@ impl<'db> Param<'db> {
Callee::Closure(closure, _) => { Callee::Closure(closure, _) => {
let c = db.lookup_intern_closure(closure.into()); let c = db.lookup_intern_closure(closure.into());
let body = db.body(c.0); let body = db.body(c.0);
if let Expr::Closure { args, .. } = &body[c.1] { if let Expr::Closure { args, .. } = &body[c.1]
if let Pat::Bind { id, .. } = &body[args[self.idx]] { && let Pat::Bind { id, .. } = &body[args[self.idx]]
return Some(Local { parent: c.0, binding_id: *id }); {
} return Some(Local { parent: c.0, binding_id: *id });
} }
None None
} }
@@ -2761,26 +2761,20 @@ impl EvaluatedConst {
pub fn render_debug(&self, db: &dyn HirDatabase) -> Result<String, MirEvalError> { pub fn render_debug(&self, db: &dyn HirDatabase) -> Result<String, MirEvalError> {
let data = self.const_.data(Interner); let data = self.const_.data(Interner);
if let TyKind::Scalar(s) = data.ty.kind(Interner) { if let TyKind::Scalar(s) = data.ty.kind(Interner)
if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) { && matches!(s, Scalar::Int(_) | Scalar::Uint(_))
if let hir_ty::ConstValue::Concrete(c) = &data.value { && let hir_ty::ConstValue::Concrete(c) = &data.value
if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned { && let hir_ty::ConstScalar::Bytes(b, _) = &c.interned
let value = u128::from_le_bytes(mir::pad16(b, false)); {
let value_signed = let value = u128::from_le_bytes(mir::pad16(b, false));
i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); let value_signed = i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
let mut result = if let Scalar::Int(_) = s { let mut result =
value_signed.to_string() if let Scalar::Int(_) = s { value_signed.to_string() } else { value.to_string() };
} else { if value >= 10 {
value.to_string() format_to!(result, " ({value:#X})");
}; return Ok(result);
if value >= 10 { } else {
format_to!(result, " ({value:#X})"); return Ok(result);
return Ok(result);
} else {
return Ok(result);
}
}
}
} }
} }
mir::render_const_using_debug_impl(db, self.def, &self.const_) mir::render_const_using_debug_impl(db, self.def, &self.const_)
@@ -4421,10 +4415,10 @@ impl Impl {
let impls = db.trait_impls_in_crate(id); let impls = db.trait_impls_in_crate(id);
all.extend(impls.for_trait(trait_.id).map(Self::from)) all.extend(impls.for_trait(trait_.id).map(Self::from))
} }
if let Some(block) = module.id.containing_block() { if let Some(block) = module.id.containing_block()
if let Some(trait_impls) = db.trait_impls_in_block(block) { && let Some(trait_impls) = db.trait_impls_in_block(block)
all.extend(trait_impls.for_trait(trait_.id).map(Self::from)); {
} all.extend(trait_impls.for_trait(trait_.id).map(Self::from));
} }
all all
} }

View file

@@ -933,19 +933,18 @@ impl<'db> SemanticsImpl<'db> {
InFile::new(file.file_id, last), InFile::new(file.file_id, last),
false, false,
&mut |InFile { value: last, file_id: last_fid }, _ctx| { &mut |InFile { value: last, file_id: last_fid }, _ctx| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next()
if first_fid == last_fid { && first_fid == last_fid
if let Some(p) = first.parent() { && let Some(p) = first.parent()
let range = first.text_range().cover(last.text_range()); {
let node = find_root(&p) let range = first.text_range().cover(last.text_range());
.covering_element(range) let node = find_root(&p)
.ancestors() .covering_element(range)
.take_while(|it| it.text_range() == range) .ancestors()
.find_map(N::cast); .take_while(|it| it.text_range() == range)
if let Some(node) = node { .find_map(N::cast);
res.push(node); if let Some(node) = node {
} res.push(node);
}
} }
} }
}, },
@@ -1391,10 +1390,10 @@ impl<'db> SemanticsImpl<'db> {
} }
})() })()
.is_none(); .is_none();
if was_not_remapped { if was_not_remapped
if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) { && let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx)
return Some(b); {
} return Some(b);
} }
} }
} }
@@ -2068,14 +2067,12 @@ impl<'db> SemanticsImpl<'db> {
break false; break false;
} }
if let Some(parent) = ast::Expr::cast(parent.clone()) { if let Some(parent) = ast::Expr::cast(parent.clone())
if let Some(ExprOrPatId::ExprId(expr_id)) = && let Some(ExprOrPatId::ExprId(expr_id)) =
source_map.node_expr(InFile { file_id, value: &parent }) source_map.node_expr(InFile { file_id, value: &parent })
{ && let Expr::Unsafe { .. } = body[expr_id]
if let Expr::Unsafe { .. } = body[expr_id] { {
break true; break true;
}
}
} }
let Some(parent_) = parent.parent() else { break false }; let Some(parent_) = parent.parent() else { break false };
@@ -2354,32 +2351,30 @@ struct RenameConflictsVisitor<'a> {
impl RenameConflictsVisitor<'_> { impl RenameConflictsVisitor<'_> {
fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) { fn resolve_path(&mut self, node: ExprOrPatId, path: &Path) {
if let Path::BarePath(path) = path { if let Path::BarePath(path) = path
if let Some(name) = path.as_ident() { && let Some(name) = path.as_ident()
if *name.symbol() == self.new_name { {
if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed( if *name.symbol() == self.new_name {
self.db, if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
name, self.db,
path, name,
self.body.expr_or_pat_path_hygiene(node), path,
self.to_be_renamed, self.body.expr_or_pat_path_hygiene(node),
) { self.to_be_renamed,
self.conflicts.insert(conflicting); ) {
} self.conflicts.insert(conflicting);
} else if *name.symbol() == self.old_name {
if let Some(conflicting) =
self.resolver.rename_will_conflict_with_another_variable(
self.db,
name,
path,
self.body.expr_or_pat_path_hygiene(node),
&self.new_name,
self.to_be_renamed,
)
{
self.conflicts.insert(conflicting);
}
} }
} else if *name.symbol() == self.old_name
&& let Some(conflicting) = self.resolver.rename_will_conflict_with_another_variable(
self.db,
name,
path,
self.body.expr_or_pat_path_hygiene(node),
&self.new_name,
self.to_be_renamed,
)
{
self.conflicts.insert(conflicting);
} }
} }
} }

View file

@@ -995,11 +995,11 @@ impl<'db> SourceAnalyzer<'db> {
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar. // trying to resolve foo::bar.
if let Some(use_tree) = parent().and_then(ast::UseTree::cast) { if let Some(use_tree) = parent().and_then(ast::UseTree::cast)
if use_tree.coloncolon_token().is_some() { && use_tree.coloncolon_token().is_some()
return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store) {
.map(|it| (it, None)); return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &store)
} .map(|it| (it, None));
} }
let meta_path = path let meta_path = path
@@ -1035,24 +1035,19 @@ impl<'db> SourceAnalyzer<'db> {
// } // }
// ``` // ```
Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => { Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => {
if let Some(mod_path) = hir_path.mod_path() { if let Some(mod_path) = hir_path.mod_path()
if let Some(ModuleDefId::ModuleId(id)) = && let Some(ModuleDefId::ModuleId(id)) =
self.resolver.resolve_module_path_in_items(db, mod_path).take_types() self.resolver.resolve_module_path_in_items(db, mod_path).take_types()
{
let parent_hir_name = parent_hir_path.segments().get(1).map(|it| it.name);
let module = crate::Module { id };
if module
.scope(db, None)
.into_iter()
.any(|(name, _)| Some(&name) == parent_hir_name)
{ {
let parent_hir_name = return Some((PathResolution::Def(ModuleDef::Module(module)), None));
parent_hir_path.segments().get(1).map(|it| it.name); };
let module = crate::Module { id };
if module
.scope(db, None)
.into_iter()
.any(|(name, _)| Some(&name) == parent_hir_name)
{
return Some((
PathResolution::Def(ModuleDef::Module(module)),
None,
));
};
}
} }
Some((it, None)) Some((it, None))
} }
@@ -1282,22 +1277,22 @@ impl<'db> SourceAnalyzer<'db> {
db: &'db dyn HirDatabase, db: &'db dyn HirDatabase,
macro_expr: InFile<&ast::MacroExpr>, macro_expr: InFile<&ast::MacroExpr>,
) -> bool { ) -> bool {
if let Some((def, body, sm, Some(infer))) = self.body_() { if let Some((def, body, sm, Some(infer))) = self.body_()
if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { && let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr)
let mut is_unsafe = false; {
let mut walk_expr = |expr_id| { let mut is_unsafe = false;
unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| { let mut walk_expr = |expr_id| {
is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No unsafe_operations(db, infer, def, body, expr_id, &mut |inside_unsafe_block| {
}) is_unsafe |= inside_unsafe_block == InsideUnsafeBlock::No
}; })
match expanded_expr { };
ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), match expanded_expr {
ExprOrPatId::PatId(expanded_pat) => { ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr),
body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) ExprOrPatId::PatId(expanded_pat) => {
} body.walk_exprs_in_pat(expanded_pat, &mut walk_expr)
} }
return is_unsafe;
} }
return is_unsafe;
} }
false false
} }
@@ -1575,12 +1570,11 @@ fn resolve_hir_path_(
// If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
// within the trait's associated types. // within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty)
if let Some(type_alias_id) = && let Some(type_alias_id) =
trait_id.trait_items(db).associated_type_by_name(unresolved.name) trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{ {
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
} }
let res = match ty { let res = match ty {
@@ -1726,12 +1720,11 @@ fn resolve_hir_path_qualifier(
// If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
// within the trait's associated types. // within the trait's associated types.
if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) { if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty)
if let Some(type_alias_id) = && let Some(type_alias_id) =
trait_id.trait_items(db).associated_type_by_name(unresolved.name) trait_id.trait_items(db).associated_type_by_name(unresolved.name)
{ {
return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into())); return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
}
} }
let res = match ty { let res = match ty {

View file

@@ -122,10 +122,10 @@ impl<'db> LookupTable<'db> {
} }
// Collapse suggestions if there are many // Collapse suggestions if there are many
if let Some(res) = &res { if let Some(res) = &res
if res.len() > self.many_threshold { && res.len() > self.many_threshold
return Some(vec![Expr::Many(ty.clone())]); {
} return Some(vec![Expr::Many(ty.clone())]);
} }
res res
@@ -160,10 +160,10 @@ impl<'db> LookupTable<'db> {
} }
// Collapse suggestions if there are many // Collapse suggestions if there are many
if let Some(res) = &res { if let Some(res) = &res
if res.len() > self.many_threshold { && res.len() > self.many_threshold
return Some(vec![Expr::Many(ty.clone())]); {
} return Some(vec![Expr::Many(ty.clone())]);
} }
res res

View file

@@ -336,10 +336,10 @@ impl<'db> Expr<'db> {
if let Expr::Method { func, params, .. } = self { if let Expr::Method { func, params, .. } = self {
res.extend(params.iter().flat_map(|it| it.traits_used(db))); res.extend(params.iter().flat_map(|it| it.traits_used(db)));
if let Some(it) = func.as_assoc_item(db) { if let Some(it) = func.as_assoc_item(db)
if let Some(it) = it.container_or_implemented_trait(db) { && let Some(it) = it.container_or_implemented_trait(db)
res.push(it); {
} res.push(it);
} }
} }

View file

@@ -82,10 +82,10 @@ fn fetch_borrowed_types(node: &ast::Adt) -> Option<Vec<ast::RefType>> {
record_field_list record_field_list
.fields() .fields()
.filter_map(|r_field| { .filter_map(|r_field| {
if let ast::Type::RefType(ref_type) = r_field.ty()? { if let ast::Type::RefType(ref_type) = r_field.ty()?
if ref_type.lifetime().is_none() { && ref_type.lifetime().is_none()
return Some(ref_type); {
} return Some(ref_type);
} }
None None
@@ -102,10 +102,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast
ast::FieldList::RecordFieldList(record_list) => record_list ast::FieldList::RecordFieldList(record_list) => record_list
.fields() .fields()
.filter_map(|f| { .filter_map(|f| {
if let ast::Type::RefType(ref_type) = f.ty()? { if let ast::Type::RefType(ref_type) = f.ty()?
if ref_type.lifetime().is_none() { && ref_type.lifetime().is_none()
return Some(ref_type); {
} return Some(ref_type);
} }
None None
@@ -114,10 +114,10 @@ fn find_ref_types_from_field_list(field_list: &ast::FieldList) -> Option<Vec<ast
ast::FieldList::TupleFieldList(tuple_field_list) => tuple_field_list ast::FieldList::TupleFieldList(tuple_field_list) => tuple_field_list
.fields() .fields()
.filter_map(|f| { .filter_map(|f| {
if let ast::Type::RefType(ref_type) = f.ty()? { if let ast::Type::RefType(ref_type) = f.ty()?
if ref_type.lifetime().is_none() { && ref_type.lifetime().is_none()
return Some(ref_type); {
} return Some(ref_type);
} }
None None

View file

@@ -201,14 +201,12 @@ fn add_missing_impl_members_inner(
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap {
let mut placeholder = None; let mut placeholder = None;
if let DefaultMethods::No = mode { if let DefaultMethods::No = mode
if let Some(ast::AssocItem::Fn(func)) = &first_new_item { && let Some(ast::AssocItem::Fn(func)) = &first_new_item
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
&& m.syntax().text() == "todo!()" && m.syntax().text() == "todo!()"
{ {
placeholder = Some(m); placeholder = Some(m);
}
}
} }
if let Some(macro_call) = placeholder { if let Some(macro_call) = placeholder {


@ -207,10 +207,10 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
// negate all tail expressions in the closure body // negate all tail expressions in the closure body
let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e); let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e);
walk_expr(&closure_body, &mut |expr| { walk_expr(&closure_body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr { if let ast::Expr::ReturnExpr(ret_expr) = expr
if let Some(ret_expr_arg) = &ret_expr.expr() { && let Some(ret_expr_arg) = &ret_expr.expr()
for_each_tail_expr(ret_expr_arg, tail_cb); {
} for_each_tail_expr(ret_expr_arg, tail_cb);
} }
}); });
for_each_tail_expr(&closure_body, tail_cb); for_each_tail_expr(&closure_body, tail_cb);


@ -86,12 +86,11 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
e @ ast::Expr::CallExpr(_) => Some(e.clone()), e @ ast::Expr::CallExpr(_) => Some(e.clone()),
_ => None, _ => None,
}; };
if let Some(ast::Expr::CallExpr(call)) = e { if let Some(ast::Expr::CallExpr(call)) = e
if let Some(arg_list) = call.arg_list() { && let Some(arg_list) = call.arg_list()
if let Some(arg) = arg_list.args().next() { && let Some(arg) = arg_list.args().next()
editor.replace(call.syntax(), arg.syntax()); {
} editor.replace(call.syntax(), arg.syntax());
}
} }
}); });
let edit = editor.finish(); let edit = editor.finish();
@ -276,12 +275,12 @@ fn is_invalid_body(
e @ ast::Expr::CallExpr(_) => Some(e.clone()), e @ ast::Expr::CallExpr(_) => Some(e.clone()),
_ => None, _ => None,
}; };
if let Some(ast::Expr::CallExpr(call)) = e { if let Some(ast::Expr::CallExpr(call)) = e
if let Some(ast::Expr::PathExpr(p)) = call.expr() { && let Some(ast::Expr::PathExpr(p)) = call.expr()
let res = p.path().and_then(|p| sema.resolve_path(&p)); {
if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res { let res = p.path().and_then(|p| sema.resolve_path(&p));
return invalid |= v != some_variant; if let Some(hir::PathResolution::Def(hir::ModuleDef::Variant(v))) = res {
} return invalid |= v != some_variant;
} }
} }
invalid = true invalid = true


@ -101,21 +101,21 @@ pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>)
// but we need to locate `AstPtr`s inside the body. // but we need to locate `AstPtr`s inside the body.
let mut wrap_body_in_block = true; let mut wrap_body_in_block = true;
if let ast::Expr::BlockExpr(block) = &body { if let ast::Expr::BlockExpr(block) = &body {
if let Some(async_token) = block.async_token() { if let Some(async_token) = block.async_token()
if !is_async { && !is_async
is_async = true; {
ret_ty = ret_ty.future_output(ctx.db())?; is_async = true;
let token_idx = async_token.index(); ret_ty = ret_ty.future_output(ctx.db())?;
let whitespace_tokens_after_count = async_token let token_idx = async_token.index();
.siblings_with_tokens(Direction::Next) let whitespace_tokens_after_count = async_token
.skip(1) .siblings_with_tokens(Direction::Next)
.take_while(|token| token.kind() == SyntaxKind::WHITESPACE) .skip(1)
.count(); .take_while(|token| token.kind() == SyntaxKind::WHITESPACE)
body.syntax().splice_children( .count();
token_idx..token_idx + whitespace_tokens_after_count + 1, body.syntax().splice_children(
Vec::new(), token_idx..token_idx + whitespace_tokens_after_count + 1,
); Vec::new(),
} );
} }
if let Some(gen_token) = block.gen_token() { if let Some(gen_token) = block.gen_token() {
is_gen = true; is_gen = true;
@ -513,10 +513,10 @@ fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Exp
CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true,
CaptureKind::Move => return place, CaptureKind::Move => return place,
}; };
if let ast::Expr::PrefixExpr(expr) = &place { if let ast::Expr::PrefixExpr(expr) = &place
if expr.op_kind() == Some(ast::UnaryOp::Deref) { && expr.op_kind() == Some(ast::UnaryOp::Deref)
return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); {
} return expr.expr().expect("`display_place_source_code()` produced an invalid expr");
} }
make::expr_ref(place, needs_mut) make::expr_ref(place, needs_mut)
} }
@ -642,11 +642,11 @@ fn peel_blocks_and_refs_and_parens(mut expr: ast::Expr) -> ast::Expr {
expr = ast::Expr::cast(parent).unwrap(); expr = ast::Expr::cast(parent).unwrap();
continue; continue;
} }
if let Some(stmt_list) = ast::StmtList::cast(parent) { if let Some(stmt_list) = ast::StmtList::cast(parent)
if let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) { && let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast)
expr = ast::Expr::BlockExpr(block); {
continue; expr = ast::Expr::BlockExpr(block);
} continue;
} }
break; break;
} }
@ -662,12 +662,11 @@ fn expr_of_pat(pat: ast::Pat) -> Option<ast::Expr> {
if let Some(let_stmt) = ast::LetStmt::cast(ancestor.clone()) { if let Some(let_stmt) = ast::LetStmt::cast(ancestor.clone()) {
break 'find_expr let_stmt.initializer(); break 'find_expr let_stmt.initializer();
} }
if ast::MatchArm::can_cast(ancestor.kind()) { if ast::MatchArm::can_cast(ancestor.kind())
if let Some(match_) = && let Some(match_) =
ancestor.parent().and_then(|it| it.parent()).and_then(ast::MatchExpr::cast) ancestor.parent().and_then(|it| it.parent()).and_then(ast::MatchExpr::cast)
{ {
break 'find_expr match_.expr(); break 'find_expr match_.expr();
}
} }
if ast::ExprStmt::can_cast(ancestor.kind()) { if ast::ExprStmt::can_cast(ancestor.kind()) {
break; break;


@ -50,10 +50,10 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
let associated_items = impl_.assoc_item_list()?; let associated_items = impl_.assoc_item_list()?;
let from_fn = associated_items.assoc_items().find_map(|item| { let from_fn = associated_items.assoc_items().find_map(|item| {
if let ast::AssocItem::Fn(f) = item { if let ast::AssocItem::Fn(f) = item
if f.name()?.text() == "from" { && f.name()?.text() == "from"
return Some(f); {
} return Some(f);
}; };
None None
})?; })?;
@ -110,12 +110,11 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
)) ))
.clone_for_update(); .clone_for_update();
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let ast::AssocItem::TypeAlias(type_alias) = &error_type { && let ast::AssocItem::TypeAlias(type_alias) = &error_type
if let Some(ty) = type_alias.ty() { && let Some(ty) = type_alias.ty()
builder.add_placeholder_snippet(cap, ty); {
} builder.add_placeholder_snippet(cap, ty);
}
} }
associated_items.add_item_at_start(error_type); associated_items.add_item_at_start(error_type);


@ -65,10 +65,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
}; };
let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| { let into_fn = impl_.assoc_item_list()?.assoc_items().find_map(|item| {
if let ast::AssocItem::Fn(f) = item { if let ast::AssocItem::Fn(f) = item
if f.name()?.text() == "into" { && f.name()?.text() == "into"
return Some(f); {
} return Some(f);
}; };
None None
})?; })?;


@ -265,10 +265,10 @@ fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
walk_expr(&body, &mut |expr| { walk_expr(&body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr { if let ast::Expr::ReturnExpr(ret_expr) = expr
if let Some(ret_expr_arg) = &ret_expr.expr() { && let Some(ret_expr_arg) = &ret_expr.expr()
for_each_tail_expr(ret_expr_arg, tail_cb); {
} for_each_tail_expr(ret_expr_arg, tail_cb);
} }
}); });
for_each_tail_expr(&body, tail_cb); for_each_tail_expr(&body, tail_cb);


@ -192,7 +192,7 @@ fn edit_struct_references(
).syntax().clone() ).syntax().clone()
) )
}, },
_ => return None, _ => None,
} }
} }
}; };

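The hunk above is different from the let-chain rewrites: its only change is `_ => return None,` becoming `_ => None,`. When a `match` arm is already in tail position — its value is what the surrounding block evaluates to — the explicit `return` adds nothing, so the fix (presumably clippy's `needless_return` lint) drops it. A standalone sketch with invented names, not taken from this repository:

// Illustrative sketch only.
enum Shape {
    Circle(f64),
    Square(f64),
}

fn area_if_round(shape: &Shape) -> Option<f64> {
    // The match is the function's tail expression, so each arm already
    // yields the return value; `_ => return None` says the same thing
    // as `_ => None` with an extra keyword.
    match shape {
        Shape::Circle(r) => Some(std::f64::consts::PI * r * r),
        _ => None,
    }
}

fn main() {
    assert!(area_if_round(&Shape::Circle(1.0)).is_some());
    assert_eq!(area_if_round(&Shape::Square(2.0)), None);
}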

@ -100,10 +100,10 @@ fn is_bool_literal_expr(
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
expr: &ast::Expr, expr: &ast::Expr,
) -> Option<ArmBodyExpression> { ) -> Option<ArmBodyExpression> {
if let ast::Expr::Literal(lit) = expr { if let ast::Expr::Literal(lit) = expr
if let ast::LiteralKind::Bool(b) = lit.kind() { && let ast::LiteralKind::Bool(b) = lit.kind()
return Some(ArmBodyExpression::Literal(b)); {
} return Some(ArmBodyExpression::Literal(b));
} }
if !sema.type_of_expr(expr)?.original.is_bool() { if !sema.type_of_expr(expr)?.original.is_bool() {


@ -106,73 +106,73 @@ pub(crate) fn desugar_try_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
}, },
); );
if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) { if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast)
if let_stmt.let_else().is_none() { && let_stmt.let_else().is_none()
let pat = let_stmt.pat()?; {
acc.add( let pat = let_stmt.pat()?;
AssistId::refactor_rewrite("desugar_try_expr_let_else"), acc.add(
"Replace try expression with let else", AssistId::refactor_rewrite("desugar_try_expr_let_else"),
target, "Replace try expression with let else",
|builder| { target,
let make = SyntaxFactory::with_mappings(); |builder| {
let mut editor = builder.make_editor(let_stmt.syntax()); let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(let_stmt.syntax());
let indent_level = IndentLevel::from_node(let_stmt.syntax()); let indent_level = IndentLevel::from_node(let_stmt.syntax());
let new_let_stmt = make.let_else_stmt( let new_let_stmt = make.let_else_stmt(
try_enum.happy_pattern(pat), try_enum.happy_pattern(pat),
let_stmt.ty(), let_stmt.ty(),
expr, expr,
make.block_expr( make.block_expr(
iter::once( iter::once(
make.expr_stmt( make.expr_stmt(
make.expr_return(Some(match try_enum { make.expr_return(Some(match try_enum {
TryEnum::Option => make.expr_path(make.ident_path("None")), TryEnum::Option => make.expr_path(make.ident_path("None")),
TryEnum::Result => make TryEnum::Result => make
.expr_call( .expr_call(
make.expr_path(make.ident_path("Err")), make.expr_path(make.ident_path("Err")),
make.arg_list(iter::once( make.arg_list(iter::once(
match ctx.config.expr_fill_default { match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make ExprFillDefaultMode::Todo => make
.expr_macro( .expr_macro(
make.ident_path("todo"), make.ident_path("todo"),
make.token_tree( make.token_tree(
syntax::SyntaxKind::L_PAREN, syntax::SyntaxKind::L_PAREN,
[], [],
), ),
) )
.into(), .into(),
ExprFillDefaultMode::Underscore => { ExprFillDefaultMode::Underscore => {
make.expr_underscore().into() make.expr_underscore().into()
} }
ExprFillDefaultMode::Default => make ExprFillDefaultMode::Default => make
.expr_macro( .expr_macro(
make.ident_path("todo"), make.ident_path("todo"),
make.token_tree( make.token_tree(
syntax::SyntaxKind::L_PAREN, syntax::SyntaxKind::L_PAREN,
[], [],
), ),
) )
.into(), .into(),
}, },
)), )),
) )
.into(), .into(),
})) }))
.indent(indent_level + 1) .indent(indent_level + 1)
.into(),
)
.into(), .into(),
), )
None, .into(),
) ),
.indent(indent_level), None,
); )
editor.replace(let_stmt.syntax(), new_let_stmt.syntax()); .indent(indent_level),
editor.add_mappings(make.finish_with_mappings()); );
builder.add_file_edits(ctx.vfs_file_id(), editor); editor.replace(let_stmt.syntax(), new_let_stmt.syntax());
}, editor.add_mappings(make.finish_with_mappings());
); builder.add_file_edits(ctx.vfs_file_id(), editor);
} },
);
} }
Some(()) Some(())
} }


@ -272,16 +272,16 @@ impl Refs {
.clone() .clone()
.into_iter() .into_iter()
.filter(|r| { .filter(|r| {
if let Definition::Trait(tr) = r.def { if let Definition::Trait(tr) = r.def
if tr.items(ctx.db()).into_iter().any(|ai| { && tr.items(ctx.db()).into_iter().any(|ai| {
if let AssocItem::Function(f) = ai { if let AssocItem::Function(f) = ai {
def_is_referenced_in(Definition::Function(f), ctx) def_is_referenced_in(Definition::Function(f), ctx)
} else { } else {
false false
} }
}) { })
return true; {
} return true;
} }
def_is_referenced_in(r.def, ctx) def_is_referenced_in(r.def, ctx)


@ -175,10 +175,10 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let fn_def = format_function(ctx, module, &fun, old_indent).clone_for_update(); let fn_def = format_function(ctx, module, &fun, old_indent).clone_for_update();
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let Some(name) = fn_def.name() { && let Some(name) = fn_def.name()
builder.add_tabstop_before(cap, name); {
} builder.add_tabstop_before(cap, name);
} }
let fn_def = match fun.self_param_adt(ctx) { let fn_def = match fun.self_param_adt(ctx) {
@ -289,10 +289,10 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Fu
// Covering element returned the parent block of one or multiple statements that have been selected // Covering element returned the parent block of one or multiple statements that have been selected
if let Some(stmt_list) = ast::StmtList::cast(node.clone()) { if let Some(stmt_list) = ast::StmtList::cast(node.clone()) {
if let Some(block_expr) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) { if let Some(block_expr) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast)
if block_expr.syntax().text_range() == selection_range { && block_expr.syntax().text_range() == selection_range
return FunctionBody::from_expr(block_expr.into()); {
} return FunctionBody::from_expr(block_expr.into());
} }
// Extract the full statements. // Extract the full statements.
@ -915,11 +915,10 @@ impl FunctionBody {
ast::Fn(fn_) => { ast::Fn(fn_) => {
let func = sema.to_def(&fn_)?; let func = sema.to_def(&fn_)?;
let mut ret_ty = func.ret_type(sema.db); let mut ret_ty = func.ret_type(sema.db);
if func.is_async(sema.db) { if func.is_async(sema.db)
if let Some(async_ret) = func.async_ret_type(sema.db) { && let Some(async_ret) = func.async_ret_type(sema.db) {
ret_ty = async_ret; ret_ty = async_ret;
} }
}
(fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty)) (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty))
}, },
ast::Static(statik) => { ast::Static(statik) => {
@ -1172,19 +1171,19 @@ impl GenericParent {
/// Search `parent`'s ancestors for items with potentially applicable generic parameters /// Search `parent`'s ancestors for items with potentially applicable generic parameters
fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> { fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
let mut list = Vec::new(); let mut list = Vec::new();
if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) { if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast)
if let ast::Item::Fn(ref fn_) = parent_item { && let ast::Item::Fn(ref fn_) = parent_item
if let Some(parent_parent) = {
parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast) if let Some(parent_parent) =
{ parent_item.syntax().parent().and_then(|it| it.parent()).and_then(ast::Item::cast)
match parent_parent { {
ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)), match parent_parent {
ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)), ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)),
_ => (), ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)),
} _ => (),
} }
list.push(GenericParent::Fn(fn_.clone()));
} }
list.push(GenericParent::Fn(fn_.clone()));
} }
list list
} }
@ -1337,10 +1336,10 @@ fn locals_defined_in_body(
// see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550 // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550
let mut res = FxIndexSet::default(); let mut res = FxIndexSet::default();
body.walk_pat(&mut |pat| { body.walk_pat(&mut |pat| {
if let ast::Pat::IdentPat(pat) = pat { if let ast::Pat::IdentPat(pat) = pat
if let Some(local) = sema.to_def(&pat) { && let Some(local) = sema.to_def(&pat)
res.insert(local); {
} res.insert(local);
} }
}); });
res res
@ -1445,11 +1444,11 @@ fn impl_type_name(impl_node: &ast::Impl) -> Option<String> {
fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) { fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
let parent_match_arm = body.parent().and_then(ast::MatchArm::cast); let parent_match_arm = body.parent().and_then(ast::MatchArm::cast);
if let Some(parent_match_arm) = parent_match_arm { if let Some(parent_match_arm) = parent_match_arm
if parent_match_arm.comma_token().is_none() { && parent_match_arm.comma_token().is_none()
let parent_match_arm = builder.make_mut(parent_match_arm); {
ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,])); let parent_match_arm = builder.make_mut(parent_match_arm);
} ted::append_child_raw(parent_match_arm.syntax(), make::token(T![,]));
} }
} }
@ -2120,30 +2119,30 @@ fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode)
_ => {} _ => {}
}, },
WalkEvent::Leave(e) => { WalkEvent::Leave(e) => {
if nested_scope.is_none() { if nested_scope.is_none()
if let Some(expr) = ast::Expr::cast(e.clone()) { && let Some(expr) = ast::Expr::cast(e.clone())
match expr { {
ast::Expr::ReturnExpr(return_expr) => { match expr {
let expr = return_expr.expr(); ast::Expr::ReturnExpr(return_expr) => {
if let Some(replacement) = make_rewritten_flow(handler, expr) { let expr = return_expr.expr();
ted::replace(return_expr.syntax(), replacement.syntax()) if let Some(replacement) = make_rewritten_flow(handler, expr) {
} ted::replace(return_expr.syntax(), replacement.syntax())
} }
ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => { }
let expr = break_expr.expr(); ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => {
if let Some(replacement) = make_rewritten_flow(handler, expr) { let expr = break_expr.expr();
ted::replace(break_expr.syntax(), replacement.syntax()) if let Some(replacement) = make_rewritten_flow(handler, expr) {
} ted::replace(break_expr.syntax(), replacement.syntax())
} }
ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => { }
if let Some(replacement) = make_rewritten_flow(handler, None) { ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => {
ted::replace(continue_expr.syntax(), replacement.syntax()) if let Some(replacement) = make_rewritten_flow(handler, None) {
} ted::replace(continue_expr.syntax(), replacement.syntax())
}
_ => {
// do nothing
} }
} }
_ => {
// do nothing
}
} }
} }


@ -69,13 +69,12 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut impl_parent: Option<ast::Impl> = None; let mut impl_parent: Option<ast::Impl> = None;
let mut impl_child_count: usize = 0; let mut impl_child_count: usize = 0;
if let Some(parent_assoc_list) = node.parent() { if let Some(parent_assoc_list) = node.parent()
if let Some(parent_impl) = parent_assoc_list.parent() { && let Some(parent_impl) = parent_assoc_list.parent()
if let Some(impl_) = ast::Impl::cast(parent_impl) { && let Some(impl_) = ast::Impl::cast(parent_impl)
impl_child_count = parent_assoc_list.children().count(); {
impl_parent = Some(impl_); impl_child_count = parent_assoc_list.children().count();
} impl_parent = Some(impl_);
}
} }
let mut curr_parent_module: Option<ast::Module> = None; let mut curr_parent_module: Option<ast::Module> = None;
@ -436,10 +435,10 @@ impl Module {
} }
}) })
.for_each(|(node, def)| { .for_each(|(node, def)| {
if node_set.insert(node.to_string()) { if node_set.insert(node.to_string())
if let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) { && let Some(import) = self.process_def_in_sel(def, &node, &module, ctx)
check_intersection_and_push(&mut imports_to_remove, import); {
} check_intersection_and_push(&mut imports_to_remove, import);
} }
}) })
} }
@ -542,15 +541,16 @@ impl Module {
import_path_to_be_removed = Some(text_range); import_path_to_be_removed = Some(text_range);
} }
if def_in_mod && def_out_sel { if def_in_mod
if let Some(first_path_in_use_tree) = use_tree_str.last() { && def_out_sel
let first_path_in_use_tree_str = first_path_in_use_tree.to_string(); && let Some(first_path_in_use_tree) = use_tree_str.last()
if !first_path_in_use_tree_str.contains("super") {
&& !first_path_in_use_tree_str.contains("crate") let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
{ if !first_path_in_use_tree_str.contains("super")
let super_path = make::ext::ident_path("super"); && !first_path_in_use_tree_str.contains("crate")
use_tree_str.push(super_path); {
} let super_path = make::ext::ident_path("super");
use_tree_str.push(super_path);
} }
} }
@ -563,12 +563,11 @@ impl Module {
if let Some(mut use_tree_paths) = use_tree_paths { if let Some(mut use_tree_paths) = use_tree_paths {
use_tree_paths.reverse(); use_tree_paths.reverse();
if uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel { if (uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel)
if let Some(first_path_in_use_tree) = use_tree_paths.first() { && let Some(first_path_in_use_tree) = use_tree_paths.first()
if first_path_in_use_tree.to_string().contains("super") { && first_path_in_use_tree.to_string().contains("super")
use_tree_paths.insert(0, make::ext::ident_path("super")); {
} use_tree_paths.insert(0, make::ext::ident_path("super"));
}
} }
let is_item = matches!( let is_item = matches!(
@ -691,11 +690,9 @@ fn check_def_in_mod_and_out_sel(
_ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id, _ => source.file_id.original_file(ctx.db()).file_id(ctx.db()) == curr_file_id,
}; };
if have_same_parent { if have_same_parent && let ModuleSource::Module(module_) = source.value {
if let ModuleSource::Module(module_) = source.value { let in_sel = !selection_range.contains_range(module_.syntax().text_range());
let in_sel = !selection_range.contains_range(module_.syntax().text_range()); return (have_same_parent, in_sel);
return (have_same_parent, in_sel);
}
} }
return (have_same_parent, false); return (have_same_parent, false);
@ -772,12 +769,12 @@ fn get_use_tree_paths_from_path(
.filter(|x| x.to_string() != path.to_string()) .filter(|x| x.to_string() != path.to_string())
.filter_map(ast::UseTree::cast) .filter_map(ast::UseTree::cast)
.find_map(|use_tree| { .find_map(|use_tree| {
if let Some(upper_tree_path) = use_tree.path() { if let Some(upper_tree_path) = use_tree.path()
if upper_tree_path.to_string() != path.to_string() { && upper_tree_path.to_string() != path.to_string()
use_tree_str.push(upper_tree_path.clone()); {
get_use_tree_paths_from_path(upper_tree_path, use_tree_str); use_tree_str.push(upper_tree_path.clone());
return Some(use_tree); get_use_tree_paths_from_path(upper_tree_path, use_tree_str);
} return Some(use_tree);
} }
None None
})?; })?;
@ -786,11 +783,11 @@ fn get_use_tree_paths_from_path(
} }
fn add_change_vis(vis: Option<ast::Visibility>, node_or_token_opt: Option<syntax::SyntaxElement>) { fn add_change_vis(vis: Option<ast::Visibility>, node_or_token_opt: Option<syntax::SyntaxElement>) {
if vis.is_none() { if vis.is_none()
if let Some(node_or_token) = node_or_token_opt { && let Some(node_or_token) = node_or_token_opt
let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); {
ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax()); let pub_crate_vis = make::visibility_pub_crate().clone_for_update();
} ted::insert(ted::Position::before(node_or_token), pub_crate_vis.syntax());
} }
} }


@ -215,12 +215,12 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b
ast::GenericParam::LifetimeParam(lt) ast::GenericParam::LifetimeParam(lt)
if matches!(token.kind(), T![lifetime_ident]) => if matches!(token.kind(), T![lifetime_ident]) =>
{ {
if let Some(lt) = lt.lifetime() { if let Some(lt) = lt.lifetime()
if lt.text().as_str() == token.text() { && lt.text().as_str() == token.text()
*tag = true; {
tagged_one = true; *tag = true;
break; tagged_one = true;
} break;
} }
} }
param if matches!(token.kind(), T![ident]) => { param if matches!(token.kind(), T![ident]) => {


@ -72,10 +72,10 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None))) let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None)))
.clone_for_update(); .clone_for_update();
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let Some(name) = ty_alias.name() { && let Some(name) = ty_alias.name()
edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap)); {
} edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap));
} }
let indent = IndentLevel::from_node(node); let indent = IndentLevel::from_node(node);
@ -111,17 +111,17 @@ fn collect_used_generics<'gp>(
match ty { match ty {
ast::Type::PathType(ty) => { ast::Type::PathType(ty) => {
if let Some(path) = ty.path() { if let Some(path) = ty.path() {
if let Some(name_ref) = path.as_single_name_ref() { if let Some(name_ref) = path.as_single_name_ref()
if let Some(param) = known_generics.iter().find(|gp| { && let Some(param) = known_generics.iter().find(|gp| {
match gp { match gp {
ast::GenericParam::ConstParam(cp) => cp.name(), ast::GenericParam::ConstParam(cp) => cp.name(),
ast::GenericParam::TypeParam(tp) => tp.name(), ast::GenericParam::TypeParam(tp) => tp.name(),
_ => None, _ => None,
} }
.is_some_and(|n| n.text() == name_ref.text()) .is_some_and(|n| n.text() == name_ref.text())
}) { })
generics.push(param); {
} generics.push(param);
} }
generics.extend( generics.extend(
path.segments() path.segments()
@ -160,20 +160,18 @@ fn collect_used_generics<'gp>(
.and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))), .and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
), ),
ast::Type::ArrayType(ar) => { ast::Type::ArrayType(ar) => {
if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr()) { if let Some(ast::Expr::PathExpr(p)) = ar.const_arg().and_then(|x| x.expr())
if let Some(path) = p.path() { && let Some(path) = p.path()
if let Some(name_ref) = path.as_single_name_ref() { && let Some(name_ref) = path.as_single_name_ref()
if let Some(param) = known_generics.iter().find(|gp| { && let Some(param) = known_generics.iter().find(|gp| {
if let ast::GenericParam::ConstParam(cp) = gp { if let ast::GenericParam::ConstParam(cp) = gp {
cp.name().is_some_and(|n| n.text() == name_ref.text()) cp.name().is_some_and(|n| n.text() == name_ref.text())
} else { } else {
false false
}
}) {
generics.push(param);
}
} }
} })
{
generics.push(param);
} }
} }
_ => (), _ => (),


@ -404,11 +404,10 @@ impl Anchor {
} }
if let Some(expr) = if let Some(expr) =
node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr()) node.parent().and_then(ast::StmtList::cast).and_then(|it| it.tail_expr())
&& expr.syntax() == &node
{ {
if expr.syntax() == &node { cov_mark::hit!(test_extract_var_last_expr);
cov_mark::hit!(test_extract_var_last_expr); return Some(Anchor::Before(node));
return Some(Anchor::Before(node));
}
} }
if let Some(parent) = node.parent() { if let Some(parent) = node.parent() {
@ -427,10 +426,10 @@ impl Anchor {
} }
if let Some(stmt) = ast::Stmt::cast(node.clone()) { if let Some(stmt) = ast::Stmt::cast(node.clone()) {
if let ast::Stmt::ExprStmt(stmt) = stmt { if let ast::Stmt::ExprStmt(stmt) = stmt
if stmt.expr().as_ref() == Some(to_extract) { && stmt.expr().as_ref() == Some(to_extract)
return Some(Anchor::Replace(stmt)); {
} return Some(Anchor::Replace(stmt));
} }
return Some(Anchor::Before(node)); return Some(Anchor::Before(node));
} }


@ -148,11 +148,11 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
let self_name = self_name(ast_func); let self_name = self_name(ast_func);
format_to!(example, "use {use_path};\n\n"); format_to!(example, "use {use_path};\n\n");
if let Some(self_name) = &self_name { if let Some(self_name) = &self_name
if let Some(mut_) = is_ref_mut_self(ast_func) { && let Some(mut_) = is_ref_mut_self(ast_func)
let mut_ = if mut_ { "mut " } else { "" }; {
format_to!(example, "let {mut_}{self_name} = ;\n"); let mut_ = if mut_ { "mut " } else { "" };
} format_to!(example, "let {mut_}{self_name} = ;\n");
} }
for param_name in &ref_mut_params { for param_name in &ref_mut_params {
format_to!(example, "let mut {param_name} = ;\n"); format_to!(example, "let mut {param_name} = ;\n");
@ -170,10 +170,10 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
format_to!(example, "{function_call};\n"); format_to!(example, "{function_call};\n");
} }
// Check the mutated values // Check the mutated values
if let Some(self_name) = &self_name { if let Some(self_name) = &self_name
if is_ref_mut_self(ast_func) == Some(true) { && is_ref_mut_self(ast_func) == Some(true)
format_to!(example, "assert_eq!({self_name}, );"); {
} format_to!(example, "assert_eq!({self_name}, );");
} }
for param_name in &ref_mut_params { for param_name in &ref_mut_params {
format_to!(example, "assert_eq!({param_name}, );"); format_to!(example, "assert_eq!({param_name}, );");


@ -111,10 +111,10 @@ pub(crate) fn generate_fn_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>)
], ],
); );
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let Some(name) = ty_alias.name() { && let Some(name) = ty_alias.name()
edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap)); {
} edit.add_annotation(name.syntax(), builder.make_placeholder_snippet(cap));
} }
builder.add_file_edits(ctx.vfs_file_id(), edit); builder.add_file_edits(ctx.vfs_file_id(), edit);


@ -70,10 +70,10 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let TargetInfo { target_module, adt_info, target, file } = let TargetInfo { target_module, adt_info, target, file } =
fn_target_info(ctx, path, &call, fn_name)?; fn_target_info(ctx, path, &call, fn_name)?;
if let Some(m) = target_module { if let Some(m) = target_module
if !is_editable_crate(m.krate(), ctx.db()) { && !is_editable_crate(m.krate(), ctx.db())
return None; {
} return None;
} }
let function_builder = let function_builder =


@ -433,12 +433,11 @@ fn build_source_change(
new_fn.indent(1.into()); new_fn.indent(1.into());
// Insert a tabstop only for last method we generate // Insert a tabstop only for last method we generate
if i == record_fields_count - 1 { if i == record_fields_count - 1
if let Some(cap) = ctx.config.snippet_cap { && let Some(cap) = ctx.config.snippet_cap
if let Some(name) = new_fn.name() { && let Some(name) = new_fn.name()
builder.add_tabstop_before(cap, name); {
} builder.add_tabstop_before(cap, name);
}
} }
assoc_item_list.add_item(new_fn.clone().into()); assoc_item_list.add_item(new_fn.clone().into());


@ -58,11 +58,11 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
let mut editor = edit.make_editor(nominal.syntax()); let mut editor = edit.make_editor(nominal.syntax());
// Add a tabstop after the left curly brace // Add a tabstop after the left curly brace
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { && let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token())
let tabstop = edit.make_tabstop_after(cap); {
editor.add_annotation(l_curly, tabstop); let tabstop = edit.make_tabstop_after(cap);
} editor.add_annotation(l_curly, tabstop);
} }
insert_impl(&mut editor, &impl_, &nominal); insert_impl(&mut editor, &impl_, &nominal);


@ -175,18 +175,18 @@ fn remove_items_visibility(item: &ast::AssocItem) {
} }
fn strip_body(item: &ast::AssocItem) { fn strip_body(item: &ast::AssocItem) {
if let ast::AssocItem::Fn(f) = item { if let ast::AssocItem::Fn(f) = item
if let Some(body) = f.body() { && let Some(body) = f.body()
// In contrast to function bodies, we want to see no ws before a semicolon. {
// So let's remove them if we see any. // In contrast to function bodies, we want to see no ws before a semicolon.
if let Some(prev) = body.syntax().prev_sibling_or_token() { // So let's remove them if we see any.
if prev.kind() == SyntaxKind::WHITESPACE { if let Some(prev) = body.syntax().prev_sibling_or_token()
ted::remove(prev); && prev.kind() == SyntaxKind::WHITESPACE
} {
} ted::remove(prev);
ted::replace(body.syntax(), make::tokens::semicolon());
} }
ted::replace(body.syntax(), make::tokens::semicolon());
}; };
} }


@ -393,19 +393,17 @@ fn inline(
// `FileReference` incorrect // `FileReference` incorrect
if let Some(imp) = if let Some(imp) =
sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast) sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast)
&& !node.syntax().ancestors().any(|anc| &anc == imp.syntax())
&& let Some(t) = imp.self_ty()
{ {
if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) { while let Some(self_tok) = body
if let Some(t) = imp.self_ty() { .syntax()
while let Some(self_tok) = body .descendants_with_tokens()
.syntax() .filter_map(NodeOrToken::into_token)
.descendants_with_tokens() .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
.filter_map(NodeOrToken::into_token) {
.find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) let replace_with = t.clone_subtree().syntax().clone_for_update();
{ ted::replace(self_tok, replace_with);
let replace_with = t.clone_subtree().syntax().clone_for_update();
ted::replace(self_tok, replace_with);
}
}
} }
} }
@ -415,10 +413,10 @@ fn inline(
for stmt in fn_body.statements() { for stmt in fn_body.statements() {
if let Some(let_stmt) = ast::LetStmt::cast(stmt.syntax().to_owned()) { if let Some(let_stmt) = ast::LetStmt::cast(stmt.syntax().to_owned()) {
for has_token in let_stmt.syntax().children_with_tokens() { for has_token in let_stmt.syntax().children_with_tokens() {
if let Some(node) = has_token.as_node() { if let Some(node) = has_token.as_node()
if let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) { && let Some(ident_pat) = ast::IdentPat::cast(node.to_owned())
func_let_vars.insert(ident_pat.syntax().text().to_string()); {
} func_let_vars.insert(ident_pat.syntax().text().to_string());
} }
} }
} }
@ -534,16 +532,15 @@ fn inline(
} }
} }
if let Some(generic_arg_list) = generic_arg_list.clone() { if let Some(generic_arg_list) = generic_arg_list.clone()
if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) && let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
{ {
body.reindent_to(IndentLevel(0)); body.reindent_to(IndentLevel(0));
if let Some(new_body) = ast::BlockExpr::cast( if let Some(new_body) = ast::BlockExpr::cast(
PathTransform::function_call(target, source, function, generic_arg_list) PathTransform::function_call(target, source, function, generic_arg_list)
.apply(body.syntax()), .apply(body.syntax()),
) { ) {
body = new_body; body = new_body;
}
} }
} }


@ -43,10 +43,10 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let db = ctx.db(); let db = ctx.db();
let const_: ast::Const = ctx.find_node_at_offset()?; let const_: ast::Const = ctx.find_node_at_offset()?;
// Don't show the assist when the cursor is at the const's body. // Don't show the assist when the cursor is at the const's body.
if let Some(body) = const_.body() { if let Some(body) = const_.body()
if body.syntax().text_range().contains(ctx.offset()) { && body.syntax().text_range().contains(ctx.offset())
return None; {
} return None;
} }
let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?; let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?;


@ -62,10 +62,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
return None; return None;
}; };
if let Some(parent) = tgt.syntax().parent() { if let Some(parent) = tgt.syntax().parent()
if matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT) { && matches!(parent.kind(), syntax::SyntaxKind::BIN_EXPR | syntax::SyntaxKind::LET_STMT)
return None; {
} return None;
} }
let target = tgt.syntax().text_range(); let target = tgt.syntax().text_range();
@ -90,10 +90,10 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut editor = SyntaxEditor::new(edit_tgt); let mut editor = SyntaxEditor::new(edit_tgt);
for (stmt, rhs) in assignments { for (stmt, rhs) in assignments {
let mut stmt = stmt.syntax().clone(); let mut stmt = stmt.syntax().clone();
if let Some(parent) = stmt.parent() { if let Some(parent) = stmt.parent()
if ast::ExprStmt::cast(parent.clone()).is_some() { && ast::ExprStmt::cast(parent.clone()).is_some()
stmt = parent.clone(); {
} stmt = parent.clone();
} }
editor.replace(stmt, rhs.syntax()); editor.replace(stmt, rhs.syntax());
} }


@ -80,15 +80,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
// parse inside string to escape `"` // parse inside string to escape `"`
let escaped = value.escape_default().to_string(); let escaped = value.escape_default().to_string();
let suffix = string_suffix(token.text()).unwrap_or_default(); let suffix = string_suffix(token.text()).unwrap_or_default();
if let Some(offsets) = token.quote_offsets() { if let Some(offsets) = token.quote_offsets()
if token.text()[offsets.contents - token.syntax().text_range().start()] == escaped { && token.text()[offsets.contents - token.syntax().text_range().start()] == escaped
let end_quote = offsets.quotes.1; {
let end_quote = let end_quote = offsets.quotes.1;
TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix)); let end_quote =
edit.replace(offsets.quotes.0, "\""); TextRange::new(end_quote.start(), end_quote.end() - TextSize::of(suffix));
edit.replace(end_quote, "\""); edit.replace(offsets.quotes.0, "\"");
return; edit.replace(end_quote, "\"");
} return;
} }
edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}")); edit.replace(token.syntax().text_range(), format!("\"{escaped}\"{suffix}"));


@ -102,10 +102,10 @@ pub(crate) fn replace_qualified_name_with_use(
fn drop_generic_args(path: &ast::Path) -> ast::Path { fn drop_generic_args(path: &ast::Path) -> ast::Path {
let path = path.clone_for_update(); let path = path.clone_for_update();
if let Some(segment) = path.segment() { if let Some(segment) = path.segment()
if let Some(generic_args) = segment.generic_arg_list() { && let Some(generic_args) = segment.generic_arg_list()
ted::remove(generic_args.syntax()); {
} ted::remove(generic_args.syntax());
} }
path path
} }


@ -41,10 +41,10 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
return None; return None;
} }
// Do nothing if the method is a member of trait. // Do nothing if the method is a member of trait.
if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) { if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast)
if impl_.trait_().is_some() { && impl_.trait_().is_some()
return None; {
} return None;
} }
// Remove the `async` keyword plus whitespace after it, if any. // Remove the `async` keyword plus whitespace after it, if any.


@ -72,20 +72,20 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut exprs_to_unwrap = Vec::new(); let mut exprs_to_unwrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
walk_expr(&body_expr, &mut |expr| { walk_expr(&body_expr, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr { if let ast::Expr::ReturnExpr(ret_expr) = expr
if let Some(ret_expr_arg) = &ret_expr.expr() { && let Some(ret_expr_arg) = &ret_expr.expr()
for_each_tail_expr(ret_expr_arg, tail_cb); {
} for_each_tail_expr(ret_expr_arg, tail_cb);
} }
}); });
for_each_tail_expr(&body_expr, tail_cb); for_each_tail_expr(&body_expr, tail_cb);
let is_unit_type = is_unit_type(&happy_type); let is_unit_type = is_unit_type(&happy_type);
if is_unit_type { if is_unit_type {
if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() { if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token()
if token.kind() == SyntaxKind::WHITESPACE { && token.kind() == SyntaxKind::WHITESPACE
editor.delete(token); {
} editor.delete(token);
} }
editor.delete(ret_type.syntax()); editor.delete(ret_type.syntax());
@ -162,10 +162,10 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
} }
} }
if let Some(cap) = ctx.config.snippet_cap { if let Some(cap) = ctx.config.snippet_cap
if let Some(final_placeholder) = final_placeholder { && let Some(final_placeholder) = final_placeholder
editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap)); {
} editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap));
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());


@ -47,10 +47,10 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
if tuple_pat.fields().count() != tuple_init.fields().count() { if tuple_pat.fields().count() != tuple_init.fields().count() {
return None; return None;
} }
if let Some(tys) = &tuple_ty { if let Some(tys) = &tuple_ty
if tuple_pat.fields().count() != tys.fields().count() { && tuple_pat.fields().count() != tys.fields().count()
return None; {
} return None;
} }
let parent = let_kw.parent()?; let parent = let_kw.parent()?;


@ -101,24 +101,24 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let mut exprs_to_wrap = Vec::new(); let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
walk_expr(&body_expr, &mut |expr| { walk_expr(&body_expr, &mut |expr| {
if let Expr::ReturnExpr(ret_expr) = expr { if let Expr::ReturnExpr(ret_expr) = expr
if let Some(ret_expr_arg) = &ret_expr.expr() { && let Some(ret_expr_arg) = &ret_expr.expr()
for_each_tail_expr(ret_expr_arg, tail_cb); {
} for_each_tail_expr(ret_expr_arg, tail_cb);
} }
}); });
for_each_tail_expr(&body_expr, tail_cb); for_each_tail_expr(&body_expr, tail_cb);
for ret_expr_arg in exprs_to_wrap { for ret_expr_arg in exprs_to_wrap {
if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg) { if let Some(ty) = ctx.sema.type_of_expr(&ret_expr_arg)
if ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty) { && ty.adjusted().could_unify_with(ctx.db(), &semantic_new_return_ty)
// The type is already correct, don't wrap it. {
// We deliberately don't use `could_unify_with_deeply()`, because as long as the outer // The type is already correct, don't wrap it.
// enum matches it's okay for us, as we don't trigger the assist if the return type // We deliberately don't use `could_unify_with_deeply()`, because as long as the outer
// is already `Option`/`Result`, so mismatched exact type is more likely a mistake // enum matches it's okay for us, as we don't trigger the assist if the return type
// than something intended. // is already `Option`/`Result`, so mismatched exact type is more likely a mistake
continue; // than something intended.
} continue;
} }
let happy_wrapped = make.expr_call( let happy_wrapped = make.expr_call(
@ -147,13 +147,13 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
ast::GenericArg::LifetimeArg(_) => false, ast::GenericArg::LifetimeArg(_) => false,
_ => true, _ => true,
}); });
if let Some(error_type_arg) = error_type_arg { if let Some(error_type_arg) = error_type_arg
if let Some(cap) = ctx.config.snippet_cap { && let Some(cap) = ctx.config.snippet_cap
editor.add_annotation( {
error_type_arg.syntax(), editor.add_annotation(
builder.make_placeholder_snippet(cap), error_type_arg.syntax(),
); builder.make_placeholder_snippet(cap),
} );
} }
} }


@ -200,13 +200,12 @@ fn wrap_derive(
], ],
); );
if let Some(snippet_cap) = ctx.config.snippet_cap { if let Some(snippet_cap) = ctx.config.snippet_cap
if let Some(first_meta) = && let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{ {
let tabstop = edit.make_tabstop_after(snippet_cap); let tabstop = edit.make_tabstop_after(snippet_cap);
editor.add_annotation(first_meta, tabstop); editor.add_annotation(first_meta, tabstop);
}
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());
@ -256,13 +255,12 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
editor.replace(attr.syntax(), cfg_attr.syntax()); editor.replace(attr.syntax(), cfg_attr.syntax());
if let Some(snippet_cap) = ctx.config.snippet_cap { if let Some(snippet_cap) = ctx.config.snippet_cap
if let Some(first_meta) = && let Some(first_meta) =
cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token()) cfg_attr.meta().and_then(|meta| meta.token_tree()).and_then(|tt| tt.l_paren_token())
{ {
let tabstop = edit.make_tabstop_after(snippet_cap); let tabstop = edit.make_tabstop_after(snippet_cap);
editor.add_annotation(first_meta, tabstop); editor.add_annotation(first_meta, tabstop);
}
} }
editor.add_mappings(make.finish_with_mappings()); editor.add_mappings(make.finish_with_mappings());


@ -131,10 +131,10 @@ pub fn filter_assoc_items(
if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
&& assoc_item.attrs(sema.db).has_doc_hidden() && assoc_item.attrs(sema.db).has_doc_hidden()
{ {
if let hir::AssocItem::Function(f) = assoc_item { if let hir::AssocItem::Function(f) = assoc_item
if !f.has_body(sema.db) { && !f.has_body(sema.db)
return true; {
} return true;
} }
return false; return false;
} }
@ -514,10 +514,10 @@ pub(crate) fn find_struct_impl(
if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) } if !(same_ty && not_trait_impl) { None } else { Some(impl_blk) }
}); });
if let Some(ref impl_blk) = block { if let Some(ref impl_blk) = block
if has_any_fn(impl_blk, names) { && has_any_fn(impl_blk, names)
return None; {
} return None;
} }
Some(block) Some(block)
@ -526,12 +526,11 @@ pub(crate) fn find_struct_impl(
fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool { fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool {
if let Some(il) = imp.assoc_item_list() { if let Some(il) = imp.assoc_item_list() {
for item in il.assoc_items() { for item in il.assoc_items() {
if let ast::AssocItem::Fn(f) = item { if let ast::AssocItem::Fn(f) = item
if let Some(name) = f.name() { && let Some(name) = f.name()
if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) { && names.iter().any(|n| n.eq_ignore_ascii_case(&name.text()))
return true; {
} return true;
}
} }
} }
} }
@ -1021,12 +1020,12 @@ pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRa
pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList { pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList {
let mut args = vec![]; let mut args = vec![];
for param in list.params() { for param in list.params() {
if let Some(ast::Pat::IdentPat(pat)) = param.pat() { if let Some(ast::Pat::IdentPat(pat)) = param.pat()
if let Some(name) = pat.name() { && let Some(name) = pat.name()
let name = name.to_string(); {
let expr = make::expr_path(make::ext::ident_path(&name)); let name = name.to_string();
args.push(expr); let expr = make::expr_path(make::ext::ident_path(&name));
} args.push(expr);
} }
} }
make::arg_list(args) make::arg_list(args)
@ -1138,12 +1137,11 @@ pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bo
}; };
match expr { match expr {
ast::Expr::CallExpr(call) => { ast::Expr::CallExpr(call) => {
if let Some(ast::Expr::PathExpr(path_expr)) = call.expr() { if let Some(ast::Expr::PathExpr(path_expr)) = call.expr()
if let Some(PathResolution::Def(ModuleDef::Function(func))) = && let Some(PathResolution::Def(ModuleDef::Function(func))) =
path_expr.path().and_then(|path| sema.resolve_path(&path)) path_expr.path().and_then(|path| sema.resolve_path(&path))
{ {
is_const &= func.is_const(sema.db); is_const &= func.is_const(sema.db);
}
} }
} }
ast::Expr::MethodCallExpr(call) => { ast::Expr::MethodCallExpr(call) => {


@ -111,10 +111,11 @@ impl Completions {
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
super_chain_len: Option<usize>, super_chain_len: Option<usize>,
) { ) {
if let Some(len) = super_chain_len { if let Some(len) = super_chain_len
if len > 0 && len < ctx.depth_from_crate_root { && len > 0
self.add_keyword(ctx, "super::"); && len < ctx.depth_from_crate_root
} {
self.add_keyword(ctx, "super::");
} }
} }
@ -643,10 +644,10 @@ fn enum_variants_with_paths(
let variants = enum_.variants(ctx.db); let variants = enum_.variants(ctx.db);
if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_))
if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) { && impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_))
variants.iter().for_each(|variant| process_variant(*variant)); {
} variants.iter().for_each(|variant| process_variant(*variant));
} }
for variant in variants { for variant in variants {


@ -258,12 +258,11 @@ fn complete_methods(
fn on_trait_method(&mut self, func: hir::Function) -> ControlFlow<()> { fn on_trait_method(&mut self, func: hir::Function) -> ControlFlow<()> {
// This needs to come before the `seen_methods` test, so that if we see the same method twice, // This needs to come before the `seen_methods` test, so that if we see the same method twice,
// once as inherent and once not, we will include it. // once as inherent and once not, we will include it.
if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) { if let ItemContainer::Trait(trait_) = func.container(self.ctx.db)
if self.ctx.exclude_traits.contains(&trait_) && (self.ctx.exclude_traits.contains(&trait_)
|| trait_.complete(self.ctx.db) == Complete::IgnoreMethods || trait_.complete(self.ctx.db) == Complete::IgnoreMethods)
{ {
return ControlFlow::Continue(()); return ControlFlow::Continue(());
}
} }
if func.self_param(self.ctx.db).is_some() if func.self_param(self.ctx.db).is_some()

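One detail worth noting in the rewrite above: the old code tested a plain boolean disjunction inside the nested block, and when that test is folded into a let-chain it has to be parenthesized, because the operands of a chain are joined with `&&` and a bare `||` cannot sit next to a `let` binding. A minimal standalone sketch (invented names, not from this repository; assumes a toolchain with stable let-chains):

// Illustrative sketch only: the `||` test is wrapped in parentheses
// so it can appear as one operand of a `&&` let-chain.
fn rejects(len_limit: Option<usize>, s: &str) -> bool {
    if let Some(limit) = len_limit
        && (s.len() > limit || s.is_empty())
    {
        return true;
    }
    false
}

fn main() {
    assert!(rejects(Some(3), "abcd"));
    assert!(rejects(Some(3), ""));
    assert!(!rejects(None, "abcd"));
    assert!(!rejects(Some(3), "abc"));
}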

@ -128,10 +128,10 @@ fn params_from_stmt_list_scope(
{ {
let module = scope.module().into(); let module = scope.module().into();
scope.process_all_names(&mut |name, def| { scope.process_all_names(&mut |name, def| {
if let hir::ScopeDef::Local(local) = def { if let hir::ScopeDef::Local(local) = def
if let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true) { && let Ok(ty) = local.ty(ctx.db).display_source_code(ctx.db, module, true)
cb(name, ty); {
} cb(name, ty);
} }
}); });
} }


@ -228,24 +228,22 @@ fn add_function_impl_(
.set_documentation(func.docs(ctx.db)) .set_documentation(func.docs(ctx.db))
.set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() }); .set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() });
if let Some(source) = ctx.sema.source(func) { if let Some(source) = ctx.sema.source(func)
if let Some(transformed_fn) = && let Some(transformed_fn) =
get_transformed_fn(ctx, source.value, impl_def, async_sugaring) get_transformed_fn(ctx, source.value, impl_def, async_sugaring)
{ {
let function_decl = let function_decl = function_declaration(ctx, &transformed_fn, source.file_id.macro_file());
function_declaration(ctx, &transformed_fn, source.file_id.macro_file()); match ctx.config.snippet_cap {
match ctx.config.snippet_cap { Some(cap) => {
Some(cap) => { let snippet = format!("{function_decl} {{\n $0\n}}");
let snippet = format!("{function_decl} {{\n $0\n}}"); item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); }
} None => {
None => { let header = format!("{function_decl} {{");
let header = format!("{function_decl} {{"); item.text_edit(TextEdit::replace(replacement_range, header));
item.text_edit(TextEdit::replace(replacement_range, header)); }
} };
}; item.add_to(acc, ctx.db);
item.add_to(acc, ctx.db);
}
} }
} }
@ -447,36 +445,36 @@ fn add_const_impl(
) { ) {
let const_name = const_.name(ctx.db).map(|n| n.display_no_db(ctx.edition).to_smolstr()); let const_name = const_.name(ctx.db).map(|n| n.display_no_db(ctx.edition).to_smolstr());
if let Some(const_name) = const_name { if let Some(const_name) = const_name
if let Some(source) = ctx.sema.source(const_) { && let Some(source) = ctx.sema.source(const_)
let assoc_item = ast::AssocItem::Const(source.value); {
if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { let assoc_item = ast::AssocItem::Const(source.value);
let transformed_const = match transformed_item { if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
ast::AssocItem::Const(const_) => const_, let transformed_const = match transformed_item {
_ => unreachable!(), ast::AssocItem::Const(const_) => const_,
}; _ => unreachable!(),
};
let label = let label =
make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file()); make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file());
let replacement = format!("{label} "); let replacement = format!("{label} ");
let mut item = let mut item =
CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition);
item.lookup_by(format_smolstr!("const {const_name}")) item.lookup_by(format_smolstr!("const {const_name}"))
.set_documentation(const_.docs(ctx.db)) .set_documentation(const_.docs(ctx.db))
.set_relevance(CompletionRelevance { .set_relevance(CompletionRelevance {
exact_name_match: true, exact_name_match: true,
..Default::default() ..Default::default()
}); });
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
Some(cap) => item.snippet_edit( Some(cap) => item.snippet_edit(
cap, cap,
TextEdit::replace(replacement_range, format!("{replacement}$0;")), TextEdit::replace(replacement_range, format!("{replacement}$0;")),
), ),
None => item.text_edit(TextEdit::replace(replacement_range, replacement)), None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
}; };
item.add_to(acc, ctx.db); item.add_to(acc, ctx.db);
}
} }
} }
} }


@ -26,18 +26,17 @@ pub(crate) fn complete_mod(
let mut current_module = ctx.module; let mut current_module = ctx.module;
// For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're // For `mod $0`, `ctx.module` is its parent, but for `mod f$0`, it's `mod f` itself, but we're
// interested in its parent. // interested in its parent.
if ctx.original_token.kind() == SyntaxKind::IDENT { if ctx.original_token.kind() == SyntaxKind::IDENT
if let Some(module) = && let Some(module) =
ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast) ctx.original_token.parent_ancestors().nth(1).and_then(ast::Module::cast)
{ {
match ctx.sema.to_def(&module) { match ctx.sema.to_def(&module) {
Some(module) if module == current_module => { Some(module) if module == current_module => {
if let Some(parent) = current_module.parent(ctx.db) { if let Some(parent) = current_module.parent(ctx.db) {
current_module = parent; current_module = parent;
}
} }
_ => {}
} }
_ => {}
} }
} }


@ -64,18 +64,17 @@ pub(crate) fn complete_pattern(
if let Some(hir::Adt::Enum(e)) = if let Some(hir::Adt::Enum(e)) =
ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt()) ctx.expected_type.as_ref().and_then(|ty| ty.strip_references().as_adt())
&& (refutable || single_variant_enum(e))
{ {
if refutable || single_variant_enum(e) { super::enum_variants_with_paths(
super::enum_variants_with_paths( acc,
acc, ctx,
ctx, e,
e, &pattern_ctx.impl_,
&pattern_ctx.impl_, |acc, ctx, variant, path| {
|acc, ctx, variant, path| { acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path);
acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); },
}, );
);
}
} }
// FIXME: ideally, we should look at the type we are matching against and // FIXME: ideally, we should look at the type we are matching against and

View file

@@ -65,26 +65,19 @@ pub(crate) fn complete_postfix(
let cfg = ctx.config.import_path_config(ctx.is_nightly); let cfg = ctx.config.import_path_config(ctx.is_nightly);
if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop()
if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { && receiver_ty.impls_trait(ctx.db, drop_trait, &[])
if let Some(drop_fn) = ctx.famous_defs().core_mem_drop() { && let Some(drop_fn) = ctx.famous_defs().core_mem_drop()
if let Some(path) = && let Some(path) = ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg)
ctx.module.find_path(ctx.db, ItemInNs::Values(drop_fn.into()), cfg) {
{ cov_mark::hit!(postfix_drop_completion);
cov_mark::hit!(postfix_drop_completion); let mut item = postfix_snippet(
let mut item = postfix_snippet( "drop",
"drop", "fn drop(&mut self)",
"fn drop(&mut self)", &format!("{path}($0{receiver_text})", path = path.display(ctx.db, ctx.edition)),
&format!( );
"{path}($0{receiver_text})", item.set_documentation(drop_fn.docs(ctx.db));
path = path.display(ctx.db, ctx.edition) item.add_to(acc, ctx.db);
),
);
item.set_documentation(drop_fn.docs(ctx.db));
item.add_to(acc, ctx.db);
}
}
}
} }
postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db); postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc, ctx.db);
@@ -117,56 +110,50 @@ pub(crate) fn complete_postfix(
let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references()); let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
let mut is_in_cond = false; let mut is_in_cond = false;
if let Some(parent) = dot_receiver_including_refs.syntax().parent() { if let Some(parent) = dot_receiver_including_refs.syntax().parent()
if let Some(second_ancestor) = parent.parent() { && let Some(second_ancestor) = parent.parent()
let sec_ancestor_kind = second_ancestor.kind(); {
if let Some(expr) = <Either<ast::IfExpr, ast::WhileExpr>>::cast(second_ancestor) { let sec_ancestor_kind = second_ancestor.kind();
is_in_cond = match expr { if let Some(expr) = <Either<ast::IfExpr, ast::WhileExpr>>::cast(second_ancestor) {
Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent), is_in_cond = match expr {
Either::Right(it) => { Either::Left(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent),
it.condition().is_some_and(|cond| *cond.syntax() == parent) Either::Right(it) => it.condition().is_some_and(|cond| *cond.syntax() == parent),
}
}
} }
match &try_enum { }
Some(try_enum) if is_in_cond => match try_enum { match &try_enum {
TryEnum::Result => { Some(try_enum) if is_in_cond => match try_enum {
postfix_snippet( TryEnum::Result => {
"let", postfix_snippet("let", "let Ok(_)", &format!("let Ok($0) = {receiver_text}"))
"let Ok(_)",
&format!("let Ok($0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
postfix_snippet(
"letm",
"let Ok(mut _)",
&format!("let Ok(mut $0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
}
TryEnum::Option => {
postfix_snippet(
"let",
"let Some(_)",
&format!("let Some($0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
postfix_snippet(
"letm",
"let Some(mut _)",
&format!("let Some(mut $0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
}
},
_ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => {
postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
.add_to(acc, ctx.db);
postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
.add_to(acc, ctx.db); .add_to(acc, ctx.db);
postfix_snippet(
"letm",
"let Ok(mut _)",
&format!("let Ok(mut $0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
} }
_ => (), TryEnum::Option => {
postfix_snippet(
"let",
"let Some(_)",
&format!("let Some($0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
postfix_snippet(
"letm",
"let Some(mut _)",
&format!("let Some(mut $0) = {receiver_text}"),
)
.add_to(acc, ctx.db);
}
},
_ if matches!(sec_ancestor_kind, STMT_LIST | EXPR_STMT) => {
postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
.add_to(acc, ctx.db);
postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
.add_to(acc, ctx.db);
} }
_ => (),
} }
} }
@@ -258,25 +245,25 @@ pub(crate) fn complete_postfix(
) )
.add_to(acc, ctx.db); .add_to(acc, ctx.db);
postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db); postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc, ctx.db);
} else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() { } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator()
if receiver_ty.impls_trait(ctx.db, trait_, &[]) { && receiver_ty.impls_trait(ctx.db, trait_, &[])
postfix_snippet( {
"for", postfix_snippet(
"for ele in expr {}", "for",
&format!("for ele in {receiver_text} {{\n $0\n}}"), "for ele in expr {}",
) &format!("for ele in {receiver_text} {{\n $0\n}}"),
.add_to(acc, ctx.db); )
} .add_to(acc, ctx.db);
} }
} }
let mut block_should_be_wrapped = true; let mut block_should_be_wrapped = true;
if dot_receiver.syntax().kind() == BLOCK_EXPR { if dot_receiver.syntax().kind() == BLOCK_EXPR {
block_should_be_wrapped = false; block_should_be_wrapped = false;
if let Some(parent) = dot_receiver.syntax().parent() { if let Some(parent) = dot_receiver.syntax().parent()
if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) { && matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR)
block_should_be_wrapped = true; {
} block_should_be_wrapped = true;
} }
}; };
{ {
@@ -292,10 +279,10 @@ pub(crate) fn complete_postfix(
postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db); postfix_snippet("const", "const {}", &const_completion_string).add_to(acc, ctx.db);
} }
if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() { if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone()
if let Some(literal_text) = ast::String::cast(literal.token()) { && let Some(literal_text) = ast::String::cast(literal.token())
add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text); {
} add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text);
} }
postfix_snippet( postfix_snippet(

View file

@@ -54,12 +54,10 @@ pub(crate) fn complete_use_path(
for (name, def) in module_scope { for (name, def) in module_scope {
if let (Some(attrs), Some(defining_crate)) = if let (Some(attrs), Some(defining_crate)) =
(def.attrs(ctx.db), def.krate(ctx.db)) (def.attrs(ctx.db), def.krate(ctx.db))
&& (!ctx.check_stability(Some(&attrs))
|| ctx.is_doc_hidden(&attrs, defining_crate))
{ {
if !ctx.check_stability(Some(&attrs)) continue;
|| ctx.is_doc_hidden(&attrs, defining_crate)
{
continue;
}
} }
let is_name_already_imported = let is_name_already_imported =
already_imported_names.contains(name.as_str()); already_imported_names.contains(name.as_str());

View file

@@ -20,11 +20,11 @@ pub(crate) fn complete_vis_path(
// Try completing next child module of the path that is still a parent of the current module // Try completing next child module of the path that is still a parent of the current module
let next_towards_current = let next_towards_current =
ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last(); ctx.module.path_to_root(ctx.db).into_iter().take_while(|it| it != module).last();
if let Some(next) = next_towards_current { if let Some(next) = next_towards_current
if let Some(name) = next.name(ctx.db) { && let Some(name) = next.name(ctx.db)
cov_mark::hit!(visibility_qualified); {
acc.add_module(ctx, path_ctx, next, name, vec![]); cov_mark::hit!(visibility_qualified);
} acc.add_module(ctx, path_ctx, next, name, vec![]);
} }
acc.add_super_keyword(ctx, *super_chain_len); acc.add_super_keyword(ctx, *super_chain_len);
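The hunks for complete_mod, complete_use_path, and complete_vis_path above show the second recurring shape in this commit: a plain boolean guard and one or more `let` bindings merged into a single chain. Evaluation order is preserved because `&&` short-circuits left to right, so later conditions run only when the earlier ones pass, exactly as in the nested version. A minimal, hypothetical illustration (the names `is_ident`, `parent_module`, and `resolve` are made up for this sketch and do not exist in rust-analyzer):

fn is_ident(kind: &str) -> bool {
    kind == "IDENT"
}

fn parent_module(token: &str) -> Option<&str> {
    // First path segment, if any; stands in for a real module lookup.
    token.split("::").next()
}

fn resolve(kind: &str, token: &str) -> Option<String> {
    // Before the fix this would read:
    //     if is_ident(kind) {
    //         if let Some(module) = parent_module(token) { ... }
    //     }
    // The chained form keeps the same short-circuit behavior:
    // `parent_module` runs only after `is_ident` returns true.
    if is_ident(kind)
        && let Some(module) = parent_module(token)
    {
        return Some(module.to_owned());
    }
    None
}

fn main() {
    assert_eq!(resolve("IDENT", "collections::hash_map"), Some("collections".to_owned()));
    assert_eq!(resolve("LIFETIME", "collections::hash_map"), None);
}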

View file

@@ -287,24 +287,22 @@ fn expand(
&spec_attr, &spec_attr,
fake_ident_token.clone(), fake_ident_token.clone(),
), ),
) { ) && let Some((fake_mapped_token, _)) =
if let Some((fake_mapped_token, _)) = fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank) {
{ return Some(ExpansionResult {
return Some(ExpansionResult { original_file: original_file.value,
original_file: original_file.value, speculative_file,
speculative_file, original_offset,
original_offset, speculative_offset: fake_ident_token.text_range().start(),
speculative_offset: fake_ident_token.text_range().start(), fake_ident_token,
fake_ident_token, derive_ctx: Some((
derive_ctx: Some(( actual_expansion,
actual_expansion, fake_expansion,
fake_expansion, fake_mapped_token.text_range().start(),
fake_mapped_token.text_range().start(), orig_attr,
orig_attr, )),
)), });
});
}
} }
if let Some(spec_adt) = if let Some(spec_adt) =
@@ -535,14 +533,13 @@ fn analyze<'db>(
NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..), NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
.. ..
} = &nameref_ctx } = &nameref_ctx
&& is_in_token_of_for_loop(path)
{ {
if is_in_token_of_for_loop(path) { // for pat $0
// for pat $0 // there is nothing to complete here except `in` keyword
// there is nothing to complete here except `in` keyword // don't bother populating the context
// don't bother populating the context // Ideally this special casing wouldn't be needed, but the parser recovers
// Ideally this special casing wouldn't be needed, but the parser recovers return None;
return None;
}
} }
qual_ctx = qualifier_ctx; qual_ctx = qualifier_ctx;
@@ -951,29 +948,26 @@ fn classify_name_ref<'db>(
let inbetween_body_and_decl_check = |node: SyntaxNode| { let inbetween_body_and_decl_check = |node: SyntaxNode| {
if let Some(NodeOrToken::Node(n)) = if let Some(NodeOrToken::Node(n)) =
syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev) syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
&& let Some(item) = ast::Item::cast(n)
{ {
if let Some(item) = ast::Item::cast(n) { let is_inbetween = match &item {
let is_inbetween = match &item { ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), ast::Item::Enum(it) => it.variant_list().is_none(),
ast::Item::Enum(it) => it.variant_list().is_none(), ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), ast::Item::Impl(it) => it.assoc_item_list().is_none(),
ast::Item::Impl(it) => it.assoc_item_list().is_none(), ast::Item::Module(it) => it.item_list().is_none() && it.semicolon_token().is_none(),
ast::Item::Module(it) => { ast::Item::Static(it) => it.body().is_none(),
it.item_list().is_none() && it.semicolon_token().is_none() ast::Item::Struct(it) => {
} it.field_list().is_none() && it.semicolon_token().is_none()
ast::Item::Static(it) => it.body().is_none(),
ast::Item::Struct(it) => {
it.field_list().is_none() && it.semicolon_token().is_none()
}
ast::Item::Trait(it) => it.assoc_item_list().is_none(),
ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
ast::Item::Union(it) => it.record_field_list().is_none(),
_ => false,
};
if is_inbetween {
return Some(item);
} }
ast::Item::Trait(it) => it.assoc_item_list().is_none(),
ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
ast::Item::Union(it) => it.record_field_list().is_none(),
_ => false,
};
if is_inbetween {
return Some(item);
} }
} }
None None
@@ -1502,10 +1496,10 @@ fn classify_name_ref<'db>(
} }
}; };
} }
} else if let Some(segment) = path.segment() { } else if let Some(segment) = path.segment()
if segment.coloncolon_token().is_some() { && segment.coloncolon_token().is_some()
path_ctx.qualified = Qualified::Absolute; {
} path_ctx.qualified = Qualified::Absolute;
} }
let mut qualifier_ctx = QualifierCtx::default(); let mut qualifier_ctx = QualifierCtx::default();
@@ -1530,38 +1524,30 @@ fn classify_name_ref<'db>(
if let Some(top) = top_node { if let Some(top) = top_node {
if let Some(NodeOrToken::Node(error_node)) = if let Some(NodeOrToken::Node(error_node)) =
syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
&& error_node.kind() == SyntaxKind::ERROR
{ {
if error_node.kind() == SyntaxKind::ERROR { for token in error_node.children_with_tokens().filter_map(NodeOrToken::into_token) {
for token in match token.kind() {
error_node.children_with_tokens().filter_map(NodeOrToken::into_token) SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token),
{ SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
match token.kind() { SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), _ => {}
SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
_ => {}
}
} }
qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
} }
qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
} }
if let PathKind::Item { .. } = path_ctx.kind { if let PathKind::Item { .. } = path_ctx.kind
if qualifier_ctx.none() { && qualifier_ctx.none()
if let Some(t) = top.first_token() { && let Some(t) = top.first_token()
if let Some(prev) = t && let Some(prev) =
.prev_token() t.prev_token().and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
.and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) && ![T![;], T!['}'], T!['{']].contains(&prev.kind())
{ {
if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) { // This was inferred to be an item position path, but it seems
// This was inferred to be an item position path, but it seems // to be part of some other broken node which leaked into an item
// to be part of some other broken node which leaked into an item // list
// list return None;
return None;
}
}
}
}
} }
} }
} }

View file

@@ -636,10 +636,10 @@ impl Builder {
} }
pub(crate) fn set_detail(&mut self, detail: Option<impl Into<String>>) -> &mut Builder { pub(crate) fn set_detail(&mut self, detail: Option<impl Into<String>>) -> &mut Builder {
self.detail = detail.map(Into::into); self.detail = detail.map(Into::into);
if let Some(detail) = &self.detail { if let Some(detail) = &self.detail
if never!(detail.contains('\n'), "multiline detail:\n{}", detail) { && never!(detail.contains('\n'), "multiline detail:\n{}", detail)
self.detail = Some(detail.split('\n').next().unwrap().to_owned()); {
} self.detail = Some(detail.split('\n').next().unwrap().to_owned());
} }
self self
} }

View file

@@ -208,9 +208,9 @@ pub fn completions(
// when the user types a bare `_` (that is it does not belong to an identifier) // when the user types a bare `_` (that is it does not belong to an identifier)
// the user might just wanted to type a `_` for type inference or pattern discarding // the user might just wanted to type a `_` for type inference or pattern discarding
// so try to suppress completions in those cases // so try to suppress completions in those cases
if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE if trigger_character == Some('_')
{ && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE
if let CompletionAnalysis::NameRef(NameRefContext { && let CompletionAnalysis::NameRef(NameRefContext {
kind: kind:
NameRefKind::Path( NameRefKind::Path(
path_ctx @ PathCompletionCtx { path_ctx @ PathCompletionCtx {
@@ -220,11 +220,9 @@ pub fn completions(
), ),
.. ..
}) = analysis }) = analysis
{ && path_ctx.is_trivial_path()
if path_ctx.is_trivial_path() { {
return None; return None;
}
}
} }
{ {

View file

@@ -164,19 +164,18 @@ pub(crate) fn render_field(
let expected_fn_type = let expected_fn_type =
ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure()); ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure());
if !expected_fn_type { if !expected_fn_type
if let Some(receiver) = &dot_access.receiver { && let Some(receiver) = &dot_access.receiver
if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { && let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone())
builder.insert(receiver.syntax().text_range().start(), "(".to_owned()); {
builder.insert(ctx.source_range().end(), ")".to_owned()); builder.insert(receiver.syntax().text_range().start(), "(".to_owned());
builder.insert(ctx.source_range().end(), ")".to_owned());
let is_parens_needed = let is_parens_needed =
!matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
if is_parens_needed { if is_parens_needed {
builder.insert(ctx.source_range().end(), "()".to_owned()); builder.insert(ctx.source_range().end(), "()".to_owned());
}
}
} }
} }
@@ -184,12 +183,11 @@ pub(crate) fn render_field(
} else { } else {
item.insert_text(field_with_receiver(receiver.as_deref(), &escaped_name)); item.insert_text(field_with_receiver(receiver.as_deref(), &escaped_name));
} }
if let Some(receiver) = &dot_access.receiver { if let Some(receiver) = &dot_access.receiver
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { && let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone())
if let Some(ref_mode) = compute_ref_match(ctx.completion, ty) { && let Some(ref_mode) = compute_ref_match(ctx.completion, ty)
item.ref_match(ref_mode, original.syntax().text_range().start()); {
} item.ref_match(ref_mode, original.syntax().text_range().start());
}
} }
item.doc_aliases(ctx.doc_aliases); item.doc_aliases(ctx.doc_aliases);
item.build(db) item.build(db)
@@ -437,26 +435,21 @@ fn render_resolution_path(
path_ctx, path_ctx,
PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. } PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. }
) && config.callable.is_some(); ) && config.callable.is_some();
if type_path_no_ty_args { if type_path_no_ty_args && let Some(cap) = cap {
if let Some(cap) = cap { let has_non_default_type_params = match resolution {
let has_non_default_type_params = match resolution { ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db),
ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db), ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => {
ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => { it.has_non_default_type_params(db)
it.has_non_default_type_params(db)
}
_ => false,
};
if has_non_default_type_params {
cov_mark::hit!(inserts_angle_brackets_for_generics);
item.lookup_by(name.clone())
.label(SmolStr::from_iter([&name, "<…>"]))
.trigger_call_info()
.insert_snippet(
cap,
format!("{}<$0>", local_name.display(db, completion.edition)),
);
} }
_ => false,
};
if has_non_default_type_params {
cov_mark::hit!(inserts_angle_brackets_for_generics);
item.lookup_by(name.clone())
.label(SmolStr::from_iter([&name, "<…>"]))
.trigger_call_info()
.insert_snippet(cap, format!("{}<$0>", local_name.display(db, completion.edition)));
} }
} }
@@ -634,23 +627,24 @@ fn compute_ref_match(
if expected_type.could_unify_with(ctx.db, completion_ty) { if expected_type.could_unify_with(ctx.db, completion_ty) {
return None; return None;
} }
if let Some(expected_without_ref) = &expected_without_ref { if let Some(expected_without_ref) = &expected_without_ref
if completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) { && completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref)
cov_mark::hit!(suggest_ref); {
let mutability = if expected_type.is_mutable_reference() { cov_mark::hit!(suggest_ref);
hir::Mutability::Mut let mutability = if expected_type.is_mutable_reference() {
} else { hir::Mutability::Mut
hir::Mutability::Shared } else {
}; hir::Mutability::Shared
return Some(CompletionItemRefMode::Reference(mutability)); };
} return Some(CompletionItemRefMode::Reference(mutability));
} }
if let Some(completion_without_ref) = completion_without_ref { if let Some(completion_without_ref) = completion_without_ref
if completion_without_ref == *expected_type && completion_without_ref.is_copy(ctx.db) { && completion_without_ref == *expected_type
cov_mark::hit!(suggest_deref); && completion_without_ref.is_copy(ctx.db)
return Some(CompletionItemRefMode::Dereference); {
} cov_mark::hit!(suggest_deref);
return Some(CompletionItemRefMode::Dereference);
} }
None None
@@ -664,10 +658,10 @@ fn path_ref_match(
) { ) {
if let Some(original_path) = &path_ctx.original_path { if let Some(original_path) = &path_ctx.original_path {
// At least one char was typed by the user already, in that case look for the original path // At least one char was typed by the user already, in that case look for the original path
if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) { if let Some(original_path) = completion.sema.original_ast_node(original_path.clone())
if let Some(ref_mode) = compute_ref_match(completion, ty) { && let Some(ref_mode) = compute_ref_match(completion, ty)
item.ref_match(ref_mode, original_path.syntax().text_range().start()); {
} item.ref_match(ref_mode, original_path.syntax().text_range().start());
} }
} else { } else {
// completion requested on an empty identifier, there is no path here yet. // completion requested on an empty identifier, there is no path here yet.

Some files were not shown because too many files have changed in this diff