Lukas Wirth 2021-09-13 18:50:19 +02:00
parent 1ef4b2cb7b
commit a044175412
18 changed files with 91 additions and 86 deletions

View file

@@ -410,52 +410,60 @@ impl<'a> TyLoweringContext<'a> {
     ) -> (Ty, Option<TypeNs>) {
         let ty = match resolution {
             TypeNs::TraitId(trait_) => {
-                let ty = if remaining_segments.len() == 1 {
-                    let trait_ref =
-                        self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
-                    let segment = remaining_segments.first().unwrap();
-                    let found = self
-                        .db
-                        .trait_data(trait_ref.hir_trait_id())
-                        .associated_type_by_name(segment.name);
-                    match found {
-                        Some(associated_ty) => {
-                            // FIXME handle type parameters on the segment
-                            TyKind::Alias(AliasTy::Projection(ProjectionTy {
-                                associated_ty_id: to_assoc_type_id(associated_ty),
-                                substitution: trait_ref.substitution,
-                            }))
-                            .intern(&Interner)
-                        }
-                        None => {
-                            // FIXME: report error (associated type not found)
-                            TyKind::Error.intern(&Interner)
-                        }
-                    }
-                } else if remaining_segments.len() > 1 {
-                    // FIXME report error (ambiguous associated type)
-                    TyKind::Error.intern(&Interner)
-                } else {
-                    let self_ty = Some(
-                        TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
-                            .intern(&Interner),
-                    );
-                    let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
-                        ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty)
-                    });
-                    let dyn_ty = DynTy {
-                        bounds: crate::make_only_type_binders(
-                            1,
-                            QuantifiedWhereClauses::from_iter(
-                                &Interner,
-                                Some(crate::wrap_empty_binders(WhereClause::Implemented(
-                                    trait_ref,
-                                ))),
-                            ),
-                        ),
-                        lifetime: static_lifetime(),
-                    };
-                    TyKind::Dyn(dyn_ty).intern(&Interner)
+                let ty = match remaining_segments.len() {
+                    1 => {
+                        let trait_ref =
+                            self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+                        let segment = remaining_segments.first().unwrap();
+                        let found = self
+                            .db
+                            .trait_data(trait_ref.hir_trait_id())
+                            .associated_type_by_name(segment.name);
+                        match found {
+                            Some(associated_ty) => {
+                                // FIXME handle type parameters on the segment
+                                TyKind::Alias(AliasTy::Projection(ProjectionTy {
+                                    associated_ty_id: to_assoc_type_id(associated_ty),
+                                    substitution: trait_ref.substitution,
+                                }))
+                                .intern(&Interner)
+                            }
+                            None => {
+                                // FIXME: report error (associated type not found)
+                                TyKind::Error.intern(&Interner)
+                            }
+                        }
+                    }
+                    0 => {
+                        let self_ty = Some(
+                            TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+                                .intern(&Interner),
+                        );
+                        let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+                            ctx.lower_trait_ref_from_resolved_path(
+                                trait_,
+                                resolved_segment,
+                                self_ty,
+                            )
+                        });
+                        let dyn_ty = DynTy {
+                            bounds: crate::make_only_type_binders(
+                                1,
+                                QuantifiedWhereClauses::from_iter(
+                                    &Interner,
+                                    Some(crate::wrap_empty_binders(WhereClause::Implemented(
+                                        trait_ref,
+                                    ))),
+                                ),
+                            ),
+                            lifetime: static_lifetime(),
+                        };
+                        TyKind::Dyn(dyn_ty).intern(&Interner)
+                    }
+                    _ => {
+                        // FIXME report error (ambiguous associated type)
+                        TyKind::Error.intern(&Interner)
+                    }
                 };
                 return (ty, None);
             }
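
The hunk above swaps an `if remaining_segments.len() == 1 / else if … > 1 / else` chain for a `match` on the length, so each case is spelled out once. A standalone sketch (not from this commit, with made-up names) of the same shape:

    // Illustrative only: matching on a slice's length instead of chaining
    // `if len == 1 / else if len > 1 / else`, mirroring the rewrite above.
    fn describe(segments: &[&str]) -> String {
        match segments.len() {
            1 => format!("single segment: {}", segments[0]),
            0 => String::from("no segments"),
            // any other length is ambiguous here
            _ => String::from("error: ambiguous"),
        }
    }

    fn main() {
        assert_eq!(describe(&["Item"]), "single segment: Item");
        assert_eq!(describe(&[]), "no segments");
        assert_eq!(describe(&["a", "b"]), "error: ambiguous");
    }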

View file

@@ -697,7 +697,7 @@ fn iterate_trait_method_candidates(
             }
             known_implemented = true;
             // FIXME: we shouldn't be ignoring the binders here
-            callback(&self_ty, *item)?
+            callback(self_ty, *item)?
         }
     }
     ControlFlow::Continue(())
@@ -773,7 +773,7 @@ fn iterate_inherent_methods(
                 cov_mark::hit!(impl_self_type_match_without_receiver);
                 continue;
             }
-            let receiver_ty = receiver_ty.unwrap_or(&self_ty);
+            let receiver_ty = receiver_ty.unwrap_or(self_ty);
             callback(receiver_ty, item)?;
         }
     }
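
Both changes above drop a `&` in front of a binding that is already a reference, which is what clippy's `needless_borrow` lint flags. A self-contained sketch of the pattern, using hypothetical names:

    // Illustrative only: when a binding is already a `&T`, passing `&binding`
    // creates a `&&T` that the compiler auto-derefs again; the extra borrow
    // is redundant.
    fn print_len(s: &str) {
        println!("{}", s.len());
    }

    fn main() {
        let name: &str = "rust-analyzer";
        // `print_len(&name)` would also compile, but the `&` adds nothing:
        print_len(name);

        // Same with `Option::unwrap_or`: `name` is already the `&str` the
        // fallback position expects.
        let receiver: Option<&str> = None;
        let got = receiver.unwrap_or(name);
        assert_eq!(got, "rust-analyzer");
    }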

View file

@@ -167,7 +167,7 @@ impl Assists {
             None
         };
-        let label = Label::new(label.into());
+        let label = Label::new(label);
         let group = group.cloned();
         self.buf.push(Assist { id, label, group, target, source_change });
         Some(())
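
Here `label` already has the type `Label::new` expects, so the `.into()` was a no-op conversion (clippy's `useless_conversion`). An illustrative, standalone sketch:

    // Illustrative only: `.into()` on a value that already has the target
    // type converts String -> String, i.e. does nothing; drop the call.
    fn take_string(s: String) -> usize {
        s.len()
    }

    fn main() {
        let label: String = String::from("Generate function");
        // `take_string(label.into())` would be equivalent but noisier.
        let n = take_string(label);
        assert_eq!(n, 17);
    }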

View file

@@ -198,7 +198,7 @@ fn option_variants(
     sema: &Semantics<RootDatabase>,
     expr: &SyntaxNode,
 ) -> Option<(hir::Variant, hir::Variant)> {
-    let fam = FamousDefs(&sema, sema.scope(expr).krate());
+    let fam = FamousDefs(sema, sema.scope(expr).krate());
     let option_variants = fam.core_option_Option()?.variants(sema.db);
     match &*option_variants {
         &[variant0, variant1] => Some(if variant0.name(sema.db) == known::None {
@@ -224,7 +224,7 @@ fn is_invalid_body(
         invalid
     });
     if !invalid {
-        for_each_tail_expr(&expr, &mut |e| {
+        for_each_tail_expr(expr, &mut |e| {
             if invalid {
                 return;
             }

View file

@@ -110,7 +110,9 @@ fn edit_struct_def(
         } else {
             edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
         }
-        strukt.semicolon_token().map(|t| edit.delete(t.text_range()));
+        if let Some(t) = strukt.semicolon_token() {
+            edit.delete(t.text_range());
+        }
     } else {
         edit.insert(tuple_fields_text_range.start(), ast::make::tokens::single_space().text());
     }
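
Calling `Option::map` only for its side effect leaves an unused `Option<()>` behind; the commit rewrites it as `if let`, as clippy's `option_map_unit_fn` suggests. A minimal sketch of the idiom with made-up names:

    // Illustrative only: `Option::map` used purely for a side effect vs. the
    // preferred `if let` form.
    fn main() {
        let mut log = Vec::new();
        let token: Option<&str> = Some(";");

        // Discouraged: `token.map(|t| log.push(t.to_string()));`
        // Preferred:
        if let Some(t) = token {
            log.push(t.to_string());
        }

        assert_eq!(log, vec![";".to_string()]);
    }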

View file

@@ -227,14 +227,12 @@ fn find_imported_defs(ctx: &AssistContext, star: SyntaxToken) -> Option<Vec<Def>
     Some(
         [Direction::Prev, Direction::Next]
             .iter()
-            .map(|dir| {
+            .flat_map(|dir| {
                 parent_use_item_syntax
                     .siblings(dir.to_owned())
                     .filter(|n| ast::Use::can_cast(n.kind()))
             })
-            .flatten()
-            .filter_map(|n| Some(n.descendants().filter_map(ast::NameRef::cast)))
-            .flatten()
+            .flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
             .filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
                 NameRefClass::Definition(Definition::ModuleDef(def)) => Some(Def::ModuleDef(def)),
                 NameRefClass::Definition(Definition::Macro(def)) => Some(Def::MacroDef(def)),
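
In this hunk `.map(..).flatten()` collapses into a single `.flat_map(..)`, and a `filter_map` whose closure always returns `Some(..)` was only flattening anyway. A standalone sketch of the iterator idiom:

    // Illustrative only: `.map(..).flatten()` over an iterator of iterators
    // is exactly what `.flat_map(..)` does in one step.
    fn main() {
        let groups = vec![vec![1, 2], vec![3], vec![4, 5]];

        // Equivalent to `groups.iter().map(|g| g.iter().copied()).flatten()`.
        let flat: Vec<i32> = groups.iter().flat_map(|g| g.iter().copied()).collect();
        assert_eq!(flat, vec![1, 2, 3, 4, 5]);
    }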

View file

@@ -885,12 +885,9 @@ fn reference_is_exclusive(
 /// checks if this expr requires `&mut` access, recurses on field access
 fn expr_require_exclusive_access(ctx: &AssistContext, expr: &ast::Expr) -> Option<bool> {
-    match expr {
-        ast::Expr::MacroCall(_) => {
-            // FIXME: expand macro and check output for mutable usages of the variable?
-            return None;
-        }
-        _ => (),
+    if let ast::Expr::MacroCall(_) = expr {
+        // FIXME: expand macro and check output for mutable usages of the variable?
+        return None;
     }
     let parent = expr.syntax().parent()?;
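
A `match` with one interesting arm and a `_ => ()` catch-all reads more directly as `if let` (clippy's `single_match`). A self-contained sketch with invented types:

    // Illustrative only: the single-arm `match` vs. `if let` rewrite.
    enum Expr {
        MacroCall(String),
        Literal(i64),
    }

    fn bail_on_macro(expr: &Expr) -> Option<bool> {
        // Instead of `match expr { Expr::MacroCall(_) => return None, _ => () }`:
        if let Expr::MacroCall(_) = expr {
            return None;
        }
        Some(true)
    }

    fn main() {
        assert_eq!(bail_on_macro(&Expr::Literal(1)), Some(true));
        assert_eq!(bail_on_macro(&Expr::MacroCall("m!".into())), None);
    }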

View file

@@ -231,7 +231,7 @@ fn create_struct_def(
     let variant_attrs = attrs_and_docs(variant.syntax())
         .map(|tok| match tok.kind() {
             WHITESPACE => make::tokens::single_newline().into(),
-            _ => tok.into(),
+            _ => tok,
         })
         .collect();
     ted::insert_all(Position::first_child_of(strukt.syntax()), variant_attrs);
@@ -251,12 +251,14 @@ fn update_variant(variant: &ast::Variant, generic: Option<ast::GenericParamList>
         Some(gpl) => {
             let gpl = gpl.clone_for_update();
             gpl.generic_params().for_each(|gp| {
-                match gp {
+                let tbl = match gp {
                     ast::GenericParam::LifetimeParam(it) => it.type_bound_list(),
                     ast::GenericParam::TypeParam(it) => it.type_bound_list(),
                     ast::GenericParam::ConstParam(_) => return,
-                }
-                .map(|it| it.remove());
+                };
+                if let Some(tbl) = tbl {
+                    tbl.remove();
+                }
             });
             make::ty(&format!("{}<{}>", name.text(), gpl.generic_params().join(", ")))
         }

View file

@@ -79,7 +79,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
     if let Anchor::Replace(stmt) = anchor {
         cov_mark::hit!(test_extract_var_expr_stmt);
         if stmt.semicolon_token().is_none() {
-            buf.push_str(";");
+            buf.push(';');
         }
         match ctx.config.snippet_cap {
             Some(cap) => {
@@ -92,7 +92,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
                 return;
             }
-            buf.push_str(";");
+            buf.push(';');
             // We want to maintain the indent level,
             // but we do not want to duplicate possible
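
Appending a one-character literal with `push_str` is what `String::push` is for (clippy's `single_char_add_str`). A tiny standalone example:

    // Illustrative only: push a char rather than a one-char string literal.
    fn main() {
        let mut buf = String::from("let x = 1");
        buf.push(';'); // rather than buf.push_str(";")
        assert_eq!(buf, "let x = 1;");
    }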

View file

@@ -109,7 +109,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     };
     let function_builder = FunctionBuilder::from_call(ctx, &call, &path, target_module, target)?;
     let text_range = call.syntax().text_range();
-    let label = format!("Generate {} function", function_builder.fn_name.clone());
+    let label = format!("Generate {} function", function_builder.fn_name);
     add_func_to_accumulator(
         acc,
         ctx,
@@ -139,7 +139,7 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
         FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
     let text_range = call.syntax().text_range();
     let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
-    let label = format!("Generate {} method", function_builder.fn_name.clone());
+    let label = format!("Generate {} method", function_builder.fn_name);
     add_func_to_accumulator(
         acc,
         ctx,
@@ -369,7 +369,7 @@ fn make_return_type(
             }
         }
     };
-    let ret_type = ret_ty.map(|rt| make::ret_type(rt));
+    let ret_type = ret_ty.map(make::ret_type);
     (ret_type, should_focus_return_type)
 }
@@ -386,7 +386,7 @@ fn get_fn_target(
             file = in_file;
             target
         }
-        None => next_space_for_fn_after_call_site(FuncExpr::Func(call.clone()))?,
+        None => next_space_for_fn_after_call_site(FuncExpr::Func(call))?,
     };
     Some((target.clone(), file, get_insert_offset(&target)))
 }
@@ -397,7 +397,7 @@ fn get_method_target(
     impl_: &Option<ast::Impl>,
 ) -> Option<(GeneratedFunctionTarget, TextSize)> {
     let target = match impl_ {
-        Some(impl_) => next_space_for_fn_in_impl(&impl_)?,
+        Some(impl_) => next_space_for_fn_in_impl(impl_)?,
         None => {
             next_space_for_fn_in_module(ctx.sema.db, &target_module.definition_source(ctx.sema.db))?
                 .1
@@ -448,7 +448,7 @@ fn fn_args(
     });
     arg_types.push(match fn_arg_type(ctx, target_module, &arg) {
         Some(ty) => {
-            if ty.len() > 0 && ty.starts_with('&') {
+            if !ty.is_empty() && ty.starts_with('&') {
                 if let Some((new_ty, _)) = useless_type_special_case("", &ty[1..].to_owned()) {
                     new_ty
                 } else {
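
The hunks above are small cleanups of the same kind: a `.clone()` on a value that is only formatted, a closure that merely forwards its argument, and a `len() > 0` test. A standalone sketch (hypothetical `ret_type` helper) showing the preferred forms:

    // Illustrative only: `!is_empty()` over `len() > 0`, a function instead
    // of a forwarding closure, and no clone before a `format!` that only
    // borrows its arguments.
    fn ret_type(ty: &str) -> String {
        format!("-> {}", ty)
    }

    fn main() {
        let ty = String::from("&str");
        if !ty.is_empty() && ty.starts_with('&') {
            println!("reference type");
        }

        // `Some("u32").map(|t| ret_type(t))` and `.map(ret_type)` are the same.
        let rendered = Some("u32").map(ret_type);
        assert_eq!(rendered.as_deref(), Some("-> u32"));

        // `format!` borrows `ty`, so `ty.clone()` would be wasted work.
        let label = format!("Generate {} function", ty);
        assert_eq!(label, "Generate &str function");
    }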

View file

@@ -199,7 +199,7 @@ pub(crate) fn inline_(
                 .sema
                 .type_of_expr(&expr)
                 .filter(TypeInfo::has_adjustment)
-                .and_then(|_| param_ty);
+                .and(param_ty);
             body.push_front(
                 make::let_stmt(pat, ty, Some(expr)).clone_for_update().into(),
             )
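
`and_then(|_| other)` throws away the value it is handed, which is exactly what `Option::and` does. A minimal standalone example:

    // Illustrative only: `Option::and` instead of `and_then` with an ignored
    // argument.
    fn main() {
        let adjusted: Option<u32> = Some(1);
        let param_ty: Option<&str> = Some("u32");

        // Equivalent to `adjusted.and_then(|_| param_ty)`.
        let ty = adjusted.and(param_ty);
        assert_eq!(ty, Some("u32"));

        // If the left side is `None`, `and` short-circuits the same way.
        assert_eq!(None::<u32>.and(param_ty), None);
    }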

View file

@@ -77,7 +77,7 @@ fn generate_fn_def_assist(
         })
         .collect();
     match fn_params_without_lifetime.len() {
-        1 => Some(fn_params_without_lifetime.into_iter().nth(0)?),
+        1 => Some(fn_params_without_lifetime.into_iter().next()?),
         0 => None,
         // multiple unnnamed is invalid. assist is not applicable
         _ => return None,
@@ -93,8 +93,9 @@ fn generate_fn_def_assist(
             make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(),
         );
         ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax());
-        loc_needing_lifetime
-            .map(|position| ted::insert(position, new_lifetime_param.clone_for_update().syntax()));
+        if let Some(position) = loc_needing_lifetime {
+            ted::insert(position, new_lifetime_param.clone_for_update().syntax());
+        }
     })
 }
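
`Iterator::nth(0)` is just `next()` (clippy's `iter_nth_zero`); the second change in this hunk is the same `Option::map`-for-side-effect to `if let` rewrite shown earlier. A short sketch of the `next()` form:

    // Illustrative only: take the first remaining item with `next()` rather
    // than `nth(0)`.
    fn main() {
        let params = vec!["a", "b", "c"];
        let mut iter = params.into_iter();

        // Equivalent to `iter.nth(0)`.
        let first = iter.next();
        assert_eq!(first, Some("a"));
        assert_eq!(iter.next(), Some("b"));
    }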

View file

@@ -35,7 +35,7 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<
         if matches!(value, Cow::Borrowed(_)) {
             // Avoid replacing the whole string to better position the cursor.
             edit.insert(token.syntax().text_range().start(), format!("r{}", hashes));
-            edit.insert(token.syntax().text_range().end(), format!("{}", hashes));
+            edit.insert(token.syntax().text_range().end(), hashes);
         } else {
             edit.replace(
                 token.syntax().text_range(),
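
`format!("{}", hashes)` rebuilds a string that already exists (clippy's `useless_format`); the value can be passed through directly, as in the sketch below (hypothetical `insert_text` helper):

    // Illustrative only: hand over an existing String instead of re-formatting it.
    fn insert_text(buf: &mut String, text: String) {
        buf.push_str(&text);
    }

    fn main() {
        let hashes = String::from("###");
        let mut buf = String::from("r");
        // `insert_text(&mut buf, format!("{}", hashes))` builds an identical
        // String first; passing `hashes` itself is enough.
        insert_text(&mut buf, hashes);
        assert_eq!(buf, "r###");
    }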

View file

@@ -137,7 +137,7 @@ fn process_usage(
         return Some(range_to_remove(arg.syntax()));
     }
-    return None;
+    None
 }
 
 fn range_to_remove(node: &SyntaxNode) -> TextRange {
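
A trailing `return None;` at the end of a function body is just the tail expression `None` (clippy's `needless_return`). A standalone sketch:

    // Illustrative only: early exits keep `return`, the final value does not.
    fn find_even(values: &[i32]) -> Option<i32> {
        for v in values {
            if v % 2 == 0 {
                return Some(*v); // early return still needs `return`
            }
        }
        None // instead of `return None;`
    }

    fn main() {
        assert_eq!(find_even(&[1, 3, 4]), Some(4));
        assert_eq!(find_even(&[1, 3, 5]), None);
    }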

View file

@@ -126,7 +126,7 @@ fn make_else_arm(
     if let Some(else_block) = else_block {
         let pattern = if let [(Either::Left(pat), _)] = conditionals {
             ctx.sema
-                .type_of_pat(&pat)
+                .type_of_pat(pat)
                 .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
                 .zip(Some(pat))
         } else {
@@ -134,7 +134,7 @@ fn make_else_arm(
         };
         let pattern = match pattern {
             Some((it, pat)) => {
-                if does_pat_match_variant(&pat, &it.sad_pattern()) {
+                if does_pat_match_variant(pat, &it.sad_pattern()) {
                     it.happy_pattern()
                 } else {
                     it.sad_pattern()
@@ -144,7 +144,7 @@ fn make_else_arm(
         };
         make::match_arm(iter::once(pattern), None, unwrap_trivial_block(else_block))
     } else {
-        make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit().into())
+        make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit())
     }
 }
@@ -257,7 +257,7 @@ fn is_empty_expr(expr: &ast::Expr) -> bool {
 }
 
 fn binds_name(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool {
-    let binds_name_v = |pat| binds_name(&sema, &pat);
+    let binds_name_v = |pat| binds_name(sema, &pat);
     match pat {
         ast::Pat::IdentPat(pat) => !matches!(
             pat.name().and_then(|name| NameClass::classify(sema, &name)),

View file

@@ -141,10 +141,7 @@ fn path_eq_no_generics(lhs: ast::Path, rhs: ast::Path) -> bool {
                 && lhs
                     .name_ref()
                     .zip(rhs.name_ref())
-                    .map_or(false, |(lhs, rhs)| lhs.text() == rhs.text()) =>
-            {
-                ()
-            }
+                    .map_or(false, |(lhs, rhs)| lhs.text() == rhs.text()) => {}
             _ => return false,
         }

View file

@@ -33,7 +33,7 @@ pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext) -> Option<()
             AssistId("toggle_ignore", AssistKind::None),
             "Ignore this test",
             attr.syntax().text_range(),
-            |builder| builder.insert(attr.syntax().text_range().end(), &format!("\n#[ignore]")),
+            |builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
         ),
         Some(ignore_attr) => acc.add(
             AssistId("toggle_ignore", AssistKind::None),

View file

@@ -130,7 +130,7 @@ impl ProcMacroServer {
             .into_iter()
             .map(|(name, kind)| ProcMacro {
                 process: self.process.clone(),
-                name: name.into(),
+                name,
                 kind,
                 dylib_path: dylib.path.clone(),
             })