Merge remote-tracking branch 'upstream/master' into sync-from-rust

Author: Laurențiu Nicola
Date:   2023-12-12 12:26:15 +02:00
Commit: 13d4609f50
11 changed files with 95 additions and 75 deletions

View file

@@ -5,8 +5,7 @@
 //! node for a *child*, and get its hir.
 use either::Either;
-use hir_expand::HirFileId;
-use syntax::ast::HasDocComments;
+use hir_expand::{attrs::collect_attrs, HirFileId};
 
 use crate::{
     db::DefDatabase,
@@ -118,8 +117,8 @@ impl ChildBySource for ItemScope {
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db.upcast());
                 calls.for_each(|(attr_id, call_id, calls)| {
-                    if let Some(Either::Left(attr)) =
-                        adt.doc_comments_and_attrs().nth(attr_id.ast_index())
+                    if let Some((_, Either::Left(attr))) =
+                        collect_attrs(&adt).nth(attr_id.ast_index())
                     {
                         res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
                     }
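
Note: the change above is the pattern repeated throughout this commit. The removed `doc_comments_and_attrs()` helper (dropped from the `HasDocComments` trait in the last file of this diff) yielded `Either<ast::Attr, ast::Comment>` items, while `hir_expand::attrs::collect_attrs` appears to yield `(id, Either<ast::Attr, ast::Comment>)` pairs, hence the extra tuple destructuring or `.1` at each call site. A minimal sketch of the migrated call-site shape; the helper name `nth_attr` and the exact iterator item type are assumptions, not something this diff states:

use either::Either;
use hir_expand::attrs::collect_attrs;
use syntax::ast;

// Hypothetical helper mirroring the call sites touched in this commit.
fn nth_attr(item: &ast::Item, n: usize) -> Option<ast::Attr> {
    // Before: item.doc_comments_and_attrs().nth(n).and_then(Either::left)
    // After: each item also carries an attribute id, so unwrap the pair first.
    collect_attrs(item).nth(n).and_then(|(_id, it)| Either::left(it))
}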

View file

@@ -10,14 +10,14 @@ use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
-    ast::{self, HasAttrs, HasDocComments},
+    ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
 };
 use triomphe::Arc;
 
 use crate::{
     ast_id_map::AstIdMap,
-    attrs::RawAttrs,
+    attrs::{collect_attrs, RawAttrs},
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
     fixup::{self, SyntaxFixupUndoInfo},
@@ -216,9 +216,9 @@ pub fn expand_speculative(
             // Attributes may have an input token tree, build the subtree and map for this as well
             // then try finding a token id for our token if it is inside this input subtree.
             let item = ast::Item::cast(speculative_args.clone())?;
-            item.doc_comments_and_attrs()
+            collect_attrs(&item)
                 .nth(invoc_attr_index.ast_index())
-                .and_then(Either::left)
+                .and_then(|x| Either::left(x.1))
         }?;
         match attr.token_tree() {
             Some(token_tree) => {
@@ -479,10 +479,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
         MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
         MacroCallKind::Attr { invoc_attr_index, .. } => {
             cov_mark::hit!(attribute_macro_attr_censoring);
-            ast::Item::cast(node.clone())?
-                .doc_comments_and_attrs()
+            collect_attrs(&ast::Item::cast(node.clone())?)
                 .nth(invoc_attr_index.ast_index())
-                .and_then(Either::left)
+                .and_then(|x| Either::left(x.1))
                 .map(|attr| attr.syntax().clone())
                 .into_iter()
                 .collect()

View file

@@ -22,6 +22,7 @@ pub mod span;
 pub mod files;
 mod fixup;
 
+use attrs::collect_attrs;
 use triomphe::Arc;
 
 use std::{fmt, hash::Hash};
@@ -32,7 +33,7 @@ use base_db::{
 };
 use either::Either;
 use syntax::{
-    ast::{self, AstNode, HasDocComments},
+    ast::{self, AstNode},
     SyntaxNode, SyntaxToken, TextRange, TextSize,
 };
@@ -438,9 +439,9 @@ impl MacroCallLoc {
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
                 ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                    it.doc_comments_and_attrs()
+                    collect_attrs(&it)
                         .nth(derive_attr_index.ast_index())
-                        .and_then(|it| match it {
+                        .and_then(|it| match it.1 {
                             Either::Left(attr) => Some(attr.syntax().clone()),
                             Either::Right(_) => None,
                         })
@@ -451,9 +452,9 @@
                 if self.def.is_attribute_derive() {
                     // FIXME: handle `cfg_attr`
                     ast_id.with_value(ast_id.to_node(db)).map(|it| {
-                        it.doc_comments_and_attrs()
+                        collect_attrs(&it)
                             .nth(invoc_attr_index.ast_index())
-                            .and_then(|it| match it {
+                            .and_then(|it| match it.1 {
                                 Either::Left(attr) => Some(attr.syntax().clone()),
                                 Either::Right(_) => None,
                             })
@@ -549,24 +550,24 @@ impl MacroCallKind {
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: should be the range of the macro name, not the whole derive
                 // FIXME: handle `cfg_attr`
-                ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
+                collect_attrs(&ast_id.to_node(db))
                     .nth(derive_attr_index.ast_index())
                     .expect("missing derive")
+                    .1
                     .expect_left("derive is a doc comment?")
                     .syntax()
                     .text_range()
             }
             // FIXME: handle `cfg_attr`
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
-                .to_node(db)
-                .doc_comments_and_attrs()
-                .nth(invoc_attr_index.ast_index())
-                .expect("missing attribute")
-                .expect_left("attribute macro is a doc comment?")
-                .syntax()
-                .text_range(),
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                collect_attrs(&ast_id.to_node(db))
+                    .nth(invoc_attr_index.ast_index())
+                    .expect("missing attribute")
+                    .1
+                    .expect_left("attribute macro is a doc comment?")
+                    .syntax()
+                    .text_range()
+            }
         };
 
         FileRange { range, file_id }
@@ -737,11 +738,9 @@ impl ExpansionInfo {
         let attr_input_or_mac_def = def.or_else(|| match loc.kind {
             MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
-                let tt = ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
+                let tt = collect_attrs(&ast_id.to_node(db))
                     .nth(invoc_attr_index.ast_index())
-                    .and_then(Either::left)?
+                    .and_then(|x| Either::left(x.1))?
                     .token_tree()?;
                 Some(InFile::new(ast_id.file_id, tt))
             }

View file

@@ -20,8 +20,8 @@ use hir_def::{
     AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
 };
 use hir_expand::{
-    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
-    MacroFileId, MacroFileIdExt,
+    attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+    InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
+    ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
     match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
     TextRange, TextSize,
 };
@@ -673,11 +673,22 @@ impl<'db> SemanticsImpl<'db> {
                     }
                     _ => 0,
                 };
+                // FIXME: here, the attribute's text range is used to strip away all
+                // entries from the start of the attribute "list" up to the invoking
+                // attribute. But in
+                // ```
+                // mod foo {
+                //     #![inner]
+                // }
+                // ```
+                // we don't want to strip away anything in the `mod foo {` range. That is,
+                // if the id corresponds to an inner attribute, we strip all text ranges
+                // of the outer ones, and then all of the inner ones up to the invoking
+                // attribute, so that everything in between is ignored.
                 let text_range = item.syntax().text_range();
-                let start = item
-                    .doc_comments_and_attrs()
+                let start = collect_attrs(&item)
                     .nth(attr_id)
-                    .map(|attr| match attr {
+                    .map(|attr| match attr.1 {
                         Either::Left(it) => it.syntax().text_range().start(),
                         Either::Right(it) => it.syntax().text_range().start(),
                     })

View file

@@ -44,21 +44,6 @@ extern crate core;
 extern crate self as foo;
 struct Foo;
 use foo::Foo as Bar;
-"#,
-    );
-}
-
-#[test]
-fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
-    check_diagnostics(
-        r#"
-//- /lib.rs
-#[macro_use] extern crate doesnotexist;
-//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
-mod _test_inner {
-  #![empty_attr]
-//^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
-}
 "#,
     );
 }

View file

@@ -67,6 +67,18 @@ macro_rules! m { () => {} } }
 self::m!(); self::m2!();
                 //^^ error: unresolved macro `self::m2!`
+"#,
+    );
+}
+
+#[test]
+fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+    check_diagnostics(
+        r#"
+mod _test_inner {
+  #![empty_attr]
+//^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+}
 "#,
     );
 }

View file

@@ -422,6 +422,11 @@ fn ty_to_text_edit(
     Some(builder.finish())
 }
 
+pub enum RangeLimit {
+    Fixed(TextRange),
+    NearestParent(TextSize),
+}
+
 // Feature: Inlay Hints
 //
 // rust-analyzer shows additional information inline with the source code.
@@ -443,7 +448,7 @@ fn ty_to_text_edit(
 pub(crate) fn inlay_hints(
     db: &RootDatabase,
     file_id: FileId,
-    range_limit: Option<TextRange>,
+    range_limit: Option<RangeLimit>,
     config: &InlayHintsConfig,
 ) -> Vec<InlayHint> {
     let _p = profile::span("inlay_hints");
@@ -458,13 +463,31 @@ pub(crate) fn inlay_hints(
     let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
 
     match range_limit {
-        Some(range) => match file.covering_element(range) {
+        Some(RangeLimit::Fixed(range)) => match file.covering_element(range) {
             NodeOrToken::Token(_) => return acc,
             NodeOrToken::Node(n) => n
                 .descendants()
                 .filter(|descendant| range.intersect(descendant.text_range()).is_some())
                 .for_each(hints),
         },
+        Some(RangeLimit::NearestParent(position)) => {
+            match file.token_at_offset(position).left_biased() {
+                Some(token) => {
+                    if let Some(parent_block) =
+                        token.parent_ancestors().find_map(ast::BlockExpr::cast)
+                    {
+                        parent_block.syntax().descendants().for_each(hints)
+                    } else if let Some(parent_item) =
+                        token.parent_ancestors().find_map(ast::Item::cast)
+                    {
+                        parent_item.syntax().descendants().for_each(hints)
+                    } else {
+                        return acc;
+                    }
+                }
+                None => return acc,
+            }
+        }
         None => file.descendants().for_each(hints),
     };
 }
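
Note: the new `RangeLimit` enum above lets callers scope hint computation either to an explicit text range (`Fixed`, the old behaviour) or to the nearest block expression or item enclosing a single offset (`NearestParent`). A rough usage sketch against the `Analysis` API re-exported later in this diff; obtaining the `Analysis`, `FileId`, and config is assumed to happen elsewhere (e.g. a test fixture), and the function name `demo` is made up for illustration:

use ide::{Analysis, FileId, InlayHintsConfig, RangeLimit};
use syntax::{TextRange, TextSize};

fn demo(analysis: &Analysis, file_id: FileId, config: &InlayHintsConfig) {
    // Old behaviour, now spelled explicitly: hints for nodes intersecting a range.
    let _fixed = analysis
        .inlay_hints(
            config,
            file_id,
            Some(RangeLimit::Fixed(TextRange::new(TextSize::from(0), TextSize::from(100)))),
        )
        .unwrap();
    // New behaviour: hints for the block or item enclosing the given offset.
    let _nearest = analysis
        .inlay_hints(config, file_id, Some(RangeLimit::NearestParent(TextSize::from(42))))
        .unwrap();
}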

View file

@@ -177,7 +177,11 @@ mod tests {
     use syntax::{TextRange, TextSize};
     use test_utils::extract_annotations;
 
-    use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+    use crate::{
+        fixture,
+        inlay_hints::{InlayHintsConfig, RangeLimit},
+        ClosureReturnTypeHints,
+    };
 
     use crate::inlay_hints::tests::{
         check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
@@ -400,7 +404,7 @@ fn main() {
         .inlay_hints(
             &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
             file_id,
-            Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+            Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))),
         )
         .unwrap();
     let actual =

View file

@@ -94,7 +94,7 @@ pub use crate::{
     inlay_hints::{
         AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
         InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition,
-        InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+        InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit,
     },
     join_lines::JoinLinesConfig,
     markup::Markup,
@@ -397,7 +397,7 @@ impl Analysis {
         &self,
         config: &InlayHintsConfig,
         file_id: FileId,
-        range: Option<TextRange>,
+        range: Option<RangeLimit>,
     ) -> Cancellable<Vec<InlayHint>> {
         self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
     }

View file

@@ -12,8 +12,8 @@ use anyhow::Context;
 use ide::{
     AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
-    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
-    Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit,
+    ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
 };
 use ide_db::SymbolKind;
 use lsp_server::ErrorCode;
@@ -1409,7 +1409,7 @@ pub(crate) fn handle_inlay_hints(
     let inlay_hints_config = snap.config.inlay_hints();
     Ok(Some(
         snap.analysis
-            .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+            .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))?
             .into_iter()
             .map(|it| {
                 to_proto::inlay_hint(
@@ -1440,22 +1440,13 @@ pub(crate) fn handle_inlay_hints_resolve(
     anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
     let line_index = snap.file_line_index(file_id)?;
-    let range = from_proto::text_range(
-        &line_index,
-        lsp_types::Range { start: original_hint.position, end: original_hint.position },
-    )?;
-    let range_start = range.start();
-    let range_end = range.end();
-    let large_range = TextRange::new(
-        range_start.checked_sub(1.into()).unwrap_or(range_start),
-        range_end.checked_add(1.into()).unwrap_or(range_end),
-    );
+    let hint_position = from_proto::offset(&line_index, original_hint.position)?;
     let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
     forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
     let resolve_hints = snap.analysis.inlay_hints(
         &forced_resolve_inlay_hints_config,
         file_id,
-        Some(large_range),
+        Some(RangeLimit::NearestParent(hint_position)),
     )?;
 
     let mut resolved_hints = resolve_hints
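
Note: the resolve handler used to widen the hint position into a one-character range on each side and request hints covering that range; it now forwards the offset and lets `inlay_hints` pick the enclosing block or item. A small standalone sketch of just that contrast, reusing the `TextRange`/`TextSize` arithmetic from the removed lines (the function name `scope_for_resolve` is made up for illustration):

use ide::RangeLimit;
use syntax::{TextRange, TextSize};

fn scope_for_resolve(offset: TextSize) -> RangeLimit {
    // Old approach (removed above): pad the offset by one character on each side
    // and use the result as a covering range.
    let _padded = TextRange::new(
        offset.checked_sub(1.into()).unwrap_or(offset),
        offset.checked_add(1.into()).unwrap_or(offset),
    );
    // New approach: pass the offset itself; the IDE layer walks up to the nearest
    // enclosing block expression or item.
    RangeLimit::NearestParent(offset)
}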

View file

@@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
     }
-    fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
-        AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
-    }
 }
 
 impl DocCommentIter {