Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-29 21:35:20 +00:00
Merge #2271

2271: Force passing Source when creating a SourceAnalyzer r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit d9d99369b2

10 changed files with 51 additions and 54 deletions
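The point of the refactor is that a SyntaxNode never travels without the HirFileId it was parsed from: hir_expand's Source<T> bundles the two. Below is a minimal sketch of the shape this diff relies on, reconstructed only from its own usages (node.file_id, node.ast, Source::new, node.with_ast); the real definition lives in hir_expand and may carry additional derives and methods:

// Sketch, not the authoritative hir_expand definition.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Source<T> {
    pub file_id: HirFileId,
    pub ast: T,
}

impl<T> Source<T> {
    pub fn new(file_id: HirFileId, ast: T) -> Source<T> {
        Source { file_id, ast }
    }

    /// Keep the file association, swap the syntax payload. This is what
    /// lets `node.with_ast(it)` in the hunks below replace the old
    /// `crate::Source { file_id: file_id.into(), ast: it }` construction.
    pub fn with_ast<U>(&self, ast: U) -> Source<U> {
        Source { file_id: self.file_id, ast }
    }
}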
@@ -117,7 +117,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         node: &SyntaxNode,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        SourceAnalyzer::new(self.db, self.frange.file_id, node, offset)
+        SourceAnalyzer::new(self.db, hir::Source::new(self.frange.file_id.into(), node), offset)
     }

     pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
@@ -12,7 +12,6 @@ use hir_def::{
     path::known,
 };
 use hir_expand::{name::AsName, Source};
-use ra_db::FileId;
 use ra_syntax::{
     ast::{self, AstNode},
     match_ast, AstPtr,
@@ -30,38 +29,32 @@ use crate::{
     HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
 };

-fn try_get_resolver_for_node(
-    db: &impl HirDatabase,
-    file_id: FileId,
-    node: &SyntaxNode,
-) -> Option<Resolver> {
+fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
     match_ast! {
-        match node {
+        match (node.ast) {
             ast::Module(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(crate::Module::from_declaration(db, src)?.resolver(db))
             },
             ast::SourceFile(it) => {
-                let src =
-                    crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
+                let src = node.with_ast(crate::ModuleSource::SourceFile(it));
                 Some(crate::Module::from_definition(db, src)?.resolver(db))
             },
             ast::StructDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Struct::from_source(db, src)?.resolver(db))
             },
             ast::EnumDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Enum::from_source(db, src)?.resolver(db))
             },
-            _ => {
-                if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
-                    Some(def_with_body_from_child_node(db, Source::new(file_id.into(), node))?.resolver(db))
-                } else {
-                    // FIXME add missing cases
-                    None
-                }
+            _ => match node.ast.kind() {
+                FN_DEF | CONST_DEF | STATIC_DEF => {
+                    Some(def_with_body_from_child_node(db, node)?.resolver(db))
+                }
+                // FIXME add missing cases
+                _ => None
             },
         }
     }
 }
@@ -90,7 +83,6 @@ fn def_with_body_from_child_node(
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub struct SourceAnalyzer {
-    // FIXME: this doesn't handle macros at all
     file_id: HirFileId,
     resolver: Resolver,
     body_owner: Option<DefWithBody>,
@@ -137,20 +129,16 @@ pub struct ReferenceDescriptor {
 impl SourceAnalyzer {
     pub fn new(
         db: &impl HirDatabase,
-        file_id: FileId,
-        node: &SyntaxNode,
+        node: Source<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let node_source = Source::new(file_id.into(), node);
-        let def_with_body = def_with_body_from_child_node(db, node_source);
+        let def_with_body = def_with_body_from_child_node(db, node);
         if let Some(def) = def_with_body {
             let source_map = def.body_source_map(db);
             let scopes = def.expr_scopes(db);
             let scope = match offset {
-                None => scope_for(&scopes, &source_map, node_source),
-                Some(offset) => {
-                    scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset))
-                }
+                None => scope_for(&scopes, &source_map, node),
+                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
             };
             let resolver = expr::resolver_for_scope(db, def, scope);
             SourceAnalyzer {
@@ -159,19 +147,20 @@ impl SourceAnalyzer {
                 body_source_map: Some(source_map),
                 infer: Some(def.infer(db)),
                 scopes: Some(scopes),
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         } else {
             SourceAnalyzer {
                 resolver: node
+                    .ast
                     .ancestors()
-                    .find_map(|node| try_get_resolver_for_node(db, file_id, &node))
+                    .find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
                     .unwrap_or_default(),
                 body_owner: None,
                 body_source_map: None,
                 infer: None,
                 scopes: None,
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         }
     }
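Most of the remaining hunks are the same mechanical call-site migration; the sketch below shows the pattern once, with file_id and node standing for whatever the caller already had in scope:

// Before: file and node passed separately; nothing stopped a caller from
// pairing a node with the wrong file.
let analyzer = SourceAnalyzer::new(db, file_id, &node, None);

// After: the pair is built explicitly, with the FileId lifted into a
// HirFileId via `.into()`.
let analyzer = SourceAnalyzer::new(db, Source::new(file_id.into(), &node), None);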
@@ -1,3 +1,6 @@
+mod never_type;
+mod coercion;
+
 use std::fmt::Write;
 use std::sync::Arc;

@@ -11,7 +14,7 @@ use ra_syntax::{
 use test_utils::covers;

 use crate::{
-    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
+    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
     SourceAnalyzer,
 };

@@ -19,9 +22,6 @@ use crate::{
 // against snapshots of the expected results using insta. Use cargo-insta to
 // update the snapshots.

-mod never_type;
-mod coercion;
-
 #[test]
 fn cfg_impl_block() {
     let (db, pos) = TestDB::with_position(
@@ -4609,7 +4609,8 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
 fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
     let file = db.parse(pos.file_id).ok().unwrap();
     let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
-    let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
+    let analyzer =
+        SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
     let ty = analyzer.type_of(db, &expr).unwrap();
     ty.display(db).to_string()
 }
@ -4674,7 +4675,7 @@ fn infer(content: &str) -> String {
|
||||||
|
|
||||||
for node in source_file.syntax().descendants() {
|
for node in source_file.syntax().descendants() {
|
||||||
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
|
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
|
||||||
let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
|
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
|
||||||
infer_def(analyzer.inference_result(), analyzer.body_source_map());
|
infer_def(analyzer.inference_result(), analyzer.body_source_map());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -4715,7 +4716,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let file = db.parse(pos.file_id).ok().unwrap();
     let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
     let events = db.log_executed(|| {
-        SourceAnalyzer::new(&db, pos.file_id, &node, None);
+        SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
     });
     assert!(format!("{:?}", events).contains("infer"))
 }
@@ -4735,7 +4736,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let file = db.parse(pos.file_id).ok().unwrap();
     let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
     let events = db.log_executed(|| {
-        SourceAnalyzer::new(&db, pos.file_id, &node, None);
+        SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
     });
     assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
 }
@@ -19,7 +19,11 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
     let name_ref = calling_node.name_ref()?;

-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
+    let analyzer = hir::SourceAnalyzer::new(
+        db,
+        hir::Source::new(position.file_id.into(), name_ref.syntax()),
+        None,
+    );
     let (mut call_info, has_self) = match &calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
@@ -58,8 +58,11 @@ impl<'a> CompletionContext<'a> {
         );
         let token =
             original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
-        let analyzer =
-            hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
+        let analyzer = hir::SourceAnalyzer::new(
+            db,
+            hir::Source::new(position.file_id.into(), &token.parent()),
+            Some(position.offset),
+        );
         let mut ctx = CompletionContext {
             db,
             analyzer,
@@ -18,7 +18,8 @@ pub(crate) fn goto_type_definition(
             .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
     })?;

-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(position.file_id.into(), &node), None);

     let ty: hir::Ty = if let Some(ty) =
         ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
@@ -230,7 +230,8 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
         .ancestors()
         .take_while(|it| it.text_range() == leaf_node.text_range())
         .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
-    let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(frange.file_id.into(), &node), None);
     let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
     {
         ty
@@ -32,6 +32,7 @@ fn get_inlay_hints(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<Vec<InlayHint>> {
+    let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
     match_ast! {
         match node {
             ast::LetStmt(it) => {
@@ -39,11 +40,9 @@ fn get_inlay_hints(
                     return None;
                 }
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::LambdaExpr(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 it.param_list().map(|param_list| {
                     param_list
                         .params()
@@ -56,21 +55,17 @@ fn get_inlay_hints(
             },
             ast::ForExpr(it) => {
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::IfExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::WhileExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::MatchArmList(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(
                     it
                         .arms()
@@ -129,7 +129,8 @@ pub(crate) fn classify_name_ref(
     let _p = profile("classify_name_ref");

     let parent = name_ref.syntax().parent()?;
-    let analyzer = SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
+    let analyzer =
+        SourceAnalyzer::new(db, hir::Source::new(file_id.into(), name_ref.syntax()), None);

     if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
         tested_by!(goto_definition_works_for_methods);
@@ -176,9 +176,11 @@ impl SourceFile {
 /// ```
 #[macro_export]
 macro_rules! match_ast {
-    (match $node:ident {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
         $( ast::$ast:ident($it:ident) => $res:block, )*
-        _ => $catch_all:expr,
+        _ => $catch_all:expr $(,)?
     }) => {{
         $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
         { $catch_all }
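The match_ast! change in the last hunk is what allows source_binder to write match (node.ast) { ... }: the old rule only accepted a bare identifier ($node:ident) as the scrutinee, so a field access would not parse. The new first rule forwards the identifier form to a second rule taking $node:expr, and $(,)? makes the trailing comma on the catch-all arm optional. A usage sketch mirroring the source_binder hunk above:

// `node.ast` is an expression, not an identifier, so it needs the
// parenthesized rule added in this commit.
match_ast! {
    match (node.ast) {
        ast::Module(it) => {
            let src = node.with_ast(it);
            Some(crate::Module::from_declaration(db, src)?.resolver(db))
        },
        _ => None,
    }
}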