Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 20:42:04 +00:00
internal: rename hypothetical -> speculative
Let's steal this good naming from Roslyn before I forget about it yet again.
parent 3926f60cb5
commit 45112aa8c0
5 changed files with 25 additions and 25 deletions
@@ -120,10 +120,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
-        hypothetical_args: &ast::TokenTree,
+        speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map)
+        self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
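For context, here is a minimal caller-side sketch (not part of this commit) of the renamed API, mirroring how `CompletionContext` further down uses it. The function name `expand_with_fake_ident` is made up for illustration; `Semantics`, `speculative_expand`, and `token_tree()` come from the code in this diff, and `ide_db::RootDatabase` is assumed as the usual IDE database behind `Semantics`.

use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{ast, SyntaxNode, SyntaxToken};

fn expand_with_fake_ident(
    sema: &Semantics<'_, RootDatabase>,
    actual_macro_call: &ast::MacroCall,
    // The same call, but with a fake identifier inserted at the cursor.
    macro_call_with_fake_ident: &ast::MacroCall,
    fake_ident_token: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    // The speculative arguments are simply the modified call's token tree.
    let speculative_args = macro_call_with_fake_ident.token_tree()?;
    // Returns the speculative expansion plus the fake ident token mapped down
    // into that expansion, which is where further analysis continues.
    sema.speculative_expand(actual_macro_call, &speculative_args, fake_ident_token)
}

This is essentially what the `CompletionContext` change below does inside its loop, descending through nested macro calls one speculative expansion at a time.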
@@ -335,7 +335,7 @@ impl<'db> SemanticsImpl<'db> {
     fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
-        hypothetical_args: &ast::TokenTree,
+        speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
         let sa = self.analyze(actual_macro_call.syntax());
@@ -344,10 +344,10 @@ impl<'db> SemanticsImpl<'db> {
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
         })?;
-        hir_expand::db::expand_hypothetical(
+        hir_expand::db::expand_speculative(
             self.db.upcast(),
             macro_call_id,
-            hypothetical_args,
+            speculative_args,
             token_to_map,
         )
     }
@@ -131,15 +131,15 @@ pub trait AstDatabase: SourceDatabase {
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped
 /// token returned.
-pub fn expand_hypothetical(
+pub fn expand_speculative(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    hypothetical_args: &ast::TokenTree,
+    speculative_args: &ast::TokenTree,
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
+    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
     let range =
-        token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
+        token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
     let token_id = tmap_1.token_by_range(range)?;
 
     let macro_def = {
@@ -147,12 +147,12 @@ pub fn expand_hypothetical(
         db.macro_def(loc.def)?
     };
 
-    let hypothetical_expansion = macro_def.expand(db, actual_macro_call, &tt);
+    let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);
 
     let fragment_kind = macro_fragment_kind(db, actual_macro_call);
 
     let (node, tmap_2) =
-        mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?;
+        mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
     let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
@@ -325,7 +325,7 @@ fn macro_expand_with_arg(
     if let Some(eager) = &loc.eager {
         if arg.is_some() {
             return ExpandResult::str_err(
-                "hypothetical macro expansion not implemented for eager macro".to_owned(),
+                "speculative macro expansion not implemented for eager macro".to_owned(),
             );
         } else {
             return ExpandResult {
@@ -1026,7 +1026,7 @@ pub(crate) fn trait_environment_query(
     };
     if let Some(AssocContainerId::TraitId(trait_id)) = container {
         // add `Self: Trait<T1, T2, ...>` to the environment in trait
-        // function default implementations (and hypothetical code
+        // function default implementations (and speculative code
         // inside consts or type aliases)
         cov_mark::hit!(trait_self_implements_self);
         let substs = TyBuilder::type_params_subst(db, trait_id);
@@ -196,46 +196,46 @@ impl<'a> CompletionContext<'a> {
         };
 
         let mut original_file = original_file.syntax().clone();
-        let mut hypothetical_file = file_with_fake_ident.syntax().clone();
+        let mut speculative_file = file_with_fake_ident.syntax().clone();
         let mut offset = position.offset;
         let mut fake_ident_token = fake_ident_token;
 
         // Are we inside a macro call?
         while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
             find_node_at_offset::<ast::MacroCall>(&original_file, offset),
-            find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
+            find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
         ) {
             if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
                 != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
             {
                 break;
             }
-            let hypothetical_args = match macro_call_with_fake_ident.token_tree() {
+            let speculative_args = match macro_call_with_fake_ident.token_tree() {
                 Some(tt) => tt,
                 None => break,
             };
-            if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
+            if let (Some(actual_expansion), Some(speculative_expansion)) = (
                 ctx.sema.expand(&actual_macro_call),
                 ctx.sema.speculative_expand(
                     &actual_macro_call,
-                    &hypothetical_args,
+                    &speculative_args,
                     fake_ident_token,
                 ),
             ) {
-                let new_offset = hypothetical_expansion.1.text_range().start();
+                let new_offset = speculative_expansion.1.text_range().start();
                 if new_offset > actual_expansion.text_range().end() {
                     break;
                 }
                 original_file = actual_expansion;
-                hypothetical_file = hypothetical_expansion.0;
-                fake_ident_token = hypothetical_expansion.1;
+                speculative_file = speculative_expansion.0;
+                fake_ident_token = speculative_expansion.1;
                 offset = new_offset;
             } else {
                 break;
             }
         }
-        ctx.fill_keyword_patterns(&hypothetical_file, offset);
-        ctx.fill(&original_file, hypothetical_file, offset);
+        ctx.fill_keyword_patterns(&speculative_file, offset);
+        ctx.fill(&original_file, speculative_file, offset);
         Some(ctx)
     }
 
@@ -107,7 +107,7 @@ pub use crate::{
 /// identifier prefix/fuzzy match should be done higher in the stack, together
 /// with ordering of completions (currently this is done by the client).
 ///
-/// # Hypothetical Completion Problem
+/// # Speculative Completion Problem
 ///
 /// There's a curious unsolved problem in the current implementation. Often, you
 /// want to compute completions on a *slightly different* text document.
@@ -121,7 +121,7 @@ pub use crate::{
 /// doesn't allow such "phantom" inputs.
 ///
 /// Another case where this would be instrumental is macro expansion. We want to
-/// insert a fake ident and re-expand code. There's `expand_hypothetical` as a
+/// insert a fake ident and re-expand code. There's `expand_speculative` as a
 /// work-around for this.
 ///
 /// A different use-case is completion of injection (examples and links in doc
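The "phantom input" work-around described in this doc comment is made concrete in the completion crate: it reparses the file with a placeholder identifier inserted at the cursor and analyzes that speculative copy, re-expanding macros via `expand_speculative` where needed. Below is a minimal sketch of that step, assuming a plain full reparse is acceptable; the helper name and the placeholder string are illustrative, not taken from this commit.

use syntax::{AstNode, SourceFile, SyntaxToken, TextSize};

fn parse_with_fake_ident(text: &str, offset: TextSize) -> (SourceFile, Option<SyntaxToken>) {
    // Insert an identifier-shaped placeholder at the completion offset so an
    // otherwise incomplete position still parses as a path or expression.
    let mut speculative_text = text.to_string();
    speculative_text.insert_str(u32::from(offset) as usize, "fake_ident");
    let speculative_file = SourceFile::parse(&speculative_text).tree();
    // The token starting at the offset is the fake ident; it is what later gets
    // mapped into macro expansions by `speculative_expand` / `expand_speculative`.
    let fake_ident_token = speculative_file.syntax().token_at_offset(offset).right_biased();
    (speculative_file, fake_ident_token)
}

This corresponds to the `file_with_fake_ident` and `fake_ident_token` values that the `CompletionContext` hunk above keeps in sync with each speculative expansion.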
|
|