Skip redundant path search in resolve_completion_edits

This commit is contained in:
Lukas Wirth 2025-01-25 12:30:20 +01:00
parent 5df0b592c1
commit ae74cc3b88
9 changed files with 27 additions and 50 deletions

View file

@@ -82,8 +82,7 @@ pub struct CompletionItem {
 
     pub ref_match: Option<(CompletionItemRefMode, TextSize)>,
     /// The import data to add to completion's edits.
-    /// (ImportPath, LastSegment)
-    pub import_to_add: SmallVec<[(String, String); 1]>,
+    pub import_to_add: SmallVec<[String; 1]>,
 }
 
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
@@ -570,12 +569,7 @@ impl Builder {
         let import_to_add = self
             .imports_to_add
             .into_iter()
-            .filter_map(|import| {
-                Some((
-                    import.import_path.display(db, self.edition).to_string(),
-                    import.import_path.segments().last()?.display(db, self.edition).to_string(),
-                ))
-            })
+            .map(|import| import.import_path.display(db, self.edition).to_string())
             .collect();
 
         CompletionItem {

View file

@@ -10,17 +10,13 @@ mod snippet;
 #[cfg(test)]
 mod tests;
 
+use ide_db::text_edit::TextEdit;
 use ide_db::{
-    helpers::mod_path_to_ast,
-    imports::{
-        import_assets::NameToImport,
-        insert_use::{self, ImportScope},
-    },
-    items_locator,
+    imports::insert_use::{self, ImportScope},
     syntax_helpers::tree_diff::diff,
-    text_edit::TextEdit,
     FilePosition, FxHashSet, RootDatabase,
 };
+use syntax::ast::make;
 
 use crate::{
     completions::Completions,
@@ -272,7 +268,7 @@ pub fn resolve_completion_edits(
     db: &RootDatabase,
     config: &CompletionConfig<'_>,
     FilePosition { file_id, offset }: FilePosition,
-    imports: impl IntoIterator<Item = (String, String)>,
+    imports: impl IntoIterator<Item = String>,
 ) -> Option<Vec<TextEdit>> {
     let _p = tracing::info_span!("resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
@@ -289,27 +285,12 @@ pub fn resolve_completion_edits(
     let new_ast = scope.clone_for_update();
     let mut import_insert = TextEdit::builder();
-    let cfg = config.import_path_config(true);
 
-    imports.into_iter().for_each(|(full_import_path, imported_name)| {
-        let items_with_name = items_locator::items_with_name(
-            &sema,
-            current_crate,
-            NameToImport::exact_case_sensitive(imported_name),
-            items_locator::AssocSearchMode::Include,
-        );
-
-        let import = items_with_name
-            .filter_map(|candidate| {
-                current_module.find_use_path(db, candidate, config.insert_use.prefix_kind, cfg)
-            })
-            .find(|mod_path| mod_path.display(db, current_edition).to_string() == full_import_path);
-        if let Some(import_path) = import {
-            insert_use::insert_use(
-                &new_ast,
-                mod_path_to_ast(&import_path, current_edition),
-                &config.insert_use,
-            );
-        }
+    imports.into_iter().for_each(|full_import_path| {
+        insert_use::insert_use(
+            &new_ast,
+            make::path_from_text_with_edition(&full_import_path, current_edition),
+            &config.insert_use,
+        );
     });
 
     diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert);

View file

@@ -672,7 +672,7 @@ impl Analysis {
         &self,
         config: &CompletionConfig<'_>,
         position: FilePosition,
-        imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
+        imports: impl IntoIterator<Item = String> + std::panic::UnwindSafe,
     ) -> Cancellable<Vec<TextEdit>> {
         Ok(self
             .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?

View file

@@ -1154,10 +1154,7 @@ pub(crate) fn handle_completion_resolve(
         .resolve_completion_edits(
             &forced_resolve_completions_config,
             position,
-            resolve_data
-                .imports
-                .into_iter()
-                .map(|import| (import.full_import_path, import.imported_name)),
+            resolve_data.imports.into_iter().map(|import| import.full_import_path),
         )?
         .into_iter()
         .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))

View file

@@ -142,9 +142,8 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;
         hasher.update(prefix);
         hasher.update(u32::from(*text_size).to_le_bytes());
     }
-    for (import_path, import_name) in &item.import_to_add {
+    for import_path in &item.import_to_add {
         hasher.update(import_path);
-        hasher.update(import_name);
     }
     hasher.finalize()
 }

View file

@@ -850,7 +850,6 @@ pub struct InlayHintResolveData {
 #[derive(Debug, Serialize, Deserialize)]
 pub struct CompletionImport {
     pub full_import_path: String,
-    pub imported_name: String,
 }
 
 #[derive(Debug, Deserialize, Default)]

View file

@@ -394,10 +394,7 @@ fn completion_item(
         item.import_to_add
             .clone()
             .into_iter()
-            .map(|(import_path, import_name)| lsp_ext::CompletionImport {
-                full_import_path: import_path,
-                imported_name: import_name,
-            })
+            .map(|import_path| lsp_ext::CompletionImport { full_import_path: import_path })
             .collect()
     } else {
         Vec::new()

View file

@@ -411,6 +411,11 @@ pub fn path_from_text(text: &str) -> ast::Path {
     ast_from_text(&format!("fn main() {{ let test: {text}; }}"))
 }
 
+// FIXME: should not be pub
+pub fn path_from_text_with_edition(text: &str, edition: Edition) -> ast::Path {
+    ast_from_text_with_edition(&format!("fn main() {{ let test: {text}; }}"), edition)
+}
+
 pub fn use_tree_glob() -> ast::UseTree {
     ast_from_text("use *;")
 }
@@ -1230,7 +1235,12 @@ pub fn token_tree(
 
 #[track_caller]
 fn ast_from_text<N: AstNode>(text: &str) -> N {
-    let parse = SourceFile::parse(text, Edition::CURRENT);
+    ast_from_text_with_edition(text, Edition::CURRENT)
+}
+
+#[track_caller]
+fn ast_from_text_with_edition<N: AstNode>(text: &str, edition: Edition) -> N {
+    let parse = SourceFile::parse(text, edition);
     let node = match parse.tree().syntax().descendants().find_map(N::cast) {
         Some(it) => it,
         None => {

View file

@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: 2d8604825c458288
+lsp/ext.rs hash: af70cce5d6905e39
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue: