Merge ref 'd087f112b7d1:/library/compiler-builtins' from https://github.com/rust-lang/rust

Pull recent changes from rust-lang/rust via Josh.

Upstream ref: d087f112b7d1323446c7b39a8b616aee7fa56b3d
Filtered ref: 2d43ce8ac022170e5383f7e5a188b55564b6566a
Author: Trevor Gross
Date:   2025-06-14 04:26:26 +00:00
Commit: e6efb03ebd
80 changed files with 2372 additions and 1098 deletions

View file

@ -28,7 +28,7 @@ jobs:
run: rustup update --no-self-update stable
- name: Install cargo-workspaces
run: cargo install cargo-workspaces
run: cargo install cargo-workspaces --version "0.3.6"
- name: Publish Crates
env:
@ -54,8 +54,8 @@ jobs:
cargo workspaces rename --from project-model project_model
cargo workspaces rename --from test-fixture test_fixture
cargo workspaces rename --from test-utils test_utils
# Remove library crates from the workspaces so we don't auto-publish them as well
sed -i 's/ "lib\/\*",//' ./Cargo.toml
# Remove library crates and xtask from the workspaces so we don't auto-publish them as well
sed -i 's|^members = .*$|members = ["crates/*"]|' Cargo.toml
cargo workspaces rename ra_ap_%n
find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$(($RUN_NUMBER + 133))

View file

@ -22,7 +22,7 @@ jobs:
run: rustup update --no-self-update stable
- name: Install cargo-workspaces
run: cargo install cargo-workspaces
run: cargo install cargo-workspaces --version "0.3.6"
- name: Publish Crates
env:

View file

@ -21,6 +21,8 @@ smol_str.opt-level = 3
text-size.opt-level = 3
serde.opt-level = 3
salsa.opt-level = 3
dissimilar.opt-level = 3
# This speeds up `cargo xtask dist`.
miniz_oxide.opt-level = 3

View file

@ -67,8 +67,14 @@ pub mod keys {
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
Key::new();
pub const DERIVE_MACRO_CALL: Key<
ast::Attr,
(
AttrId,
/* derive() */ MacroCallId,
/* actual derive macros */ Box<[Option<MacroCallId>]>,
),
> = Key::new();
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.

View file

@ -931,11 +931,12 @@ pub fn new() {
// PATH_TYPE@23..26
// PATH@23..26
// PATH_SEGMENT@23..26
// L_ANGLE@23..24 "<"
// PAREN_TYPE@24..26
// L_PAREN@24..25 "("
// ERROR@25..26
// INT_NUMBER@25..26 "8"
// TYPE_ANCHOR@23..26
// L_ANGLE@23..24 "<"
// PAREN_TYPE@24..26
// L_PAREN@24..25 "("
// ERROR@25..26
// INT_NUMBER@25..26 "8"
// PLUS@26..27 "+"
// CONST_ARG@27..28
// LITERAL@27..28

View file

@ -31,6 +31,7 @@ use crate::{
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::infer::infer_query)]
#[salsa::cycle(cycle_result = crate::infer::infer_cycle_result)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir
@ -132,6 +133,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)]

View file

@ -35,7 +35,8 @@ use chalk_ir::{
use either::Either;
use hir_def::{
AdtId, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId, GenericParamId,
ImplId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
ImplId, ItemContainerId, LocalFieldId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId,
VariantId,
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
@ -135,6 +136,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
Arc::new(ctx.resolve_all())
}
pub(crate) fn infer_cycle_result(_: &dyn HirDatabase, _: DefWithBodyId) -> Arc<InferenceResult> {
Arc::new(InferenceResult { has_errors: true, ..Default::default() })
}
/// Fully normalize all the types found within `ty` in context of `owner` body definition.
///
/// This is appropriate to use only after type-check: it assumes
@ -203,7 +208,7 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
pub enum InferenceDiagnostic {
NoSuchField {
field: ExprOrPatId,
private: bool,
private: Option<LocalFieldId>,
variant: VariantId,
},
PrivateField {
@ -558,6 +563,9 @@ impl InferenceResult {
ExprOrPatId::PatId(id) => self.type_of_pat.get(id),
}
}
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
}
impl Index<ExprId> for InferenceResult {
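The `infer_cycle_result` and `is_erroneous` additions above implement sentinel-style cycle recovery: when the `infer` query participates in a salsa cycle, it yields an empty result with `has_errors` set instead of panicking, and later passes detect that sentinel and bail (see the `lower_to_mir` guard further down). A minimal self-contained sketch of the pattern, using hypothetical standalone types rather than rust-analyzer's real ones:

use std::collections::HashMap;
use std::sync::Arc;

#[derive(Default)]
struct InferenceResult {
    has_errors: bool,
    type_of_expr: HashMap<u32, &'static str>,
}

impl InferenceResult {
    // Mirrors `is_erroneous` above: a cycle-recovery sentinel has the error
    // flag set and an empty expression-type table.
    fn is_erroneous(&self) -> bool {
        self.has_errors && self.type_of_expr.is_empty()
    }
}

// Stand-in for `infer_cycle_result`: return the sentinel instead of panicking.
fn infer_cycle_result() -> Arc<InferenceResult> {
    Arc::new(InferenceResult { has_errors: true, ..Default::default() })
}

// Stand-in for the `lower_to_mir` guard: refuse bodies whose inference cycled.
fn lower_to_mir(infer: &InferenceResult) -> Result<(), &'static str> {
    if infer.is_erroneous() {
        return Err("HasErrors");
    }
    Ok(())
}

fn main() {
    let sentinel = infer_cycle_result();
    assert!(sentinel.is_erroneous());
    assert!(lower_to_mir(&sentinel).is_err());
}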

View file

@ -554,7 +554,7 @@ impl InferenceContext<'_> {
self.push_diagnostic(
InferenceDiagnostic::NoSuchField {
field: field.expr.into(),
private: true,
private: Some(local_id),
variant: def,
},
);
@ -564,7 +564,7 @@ impl InferenceContext<'_> {
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: field.expr.into(),
private: false,
private: None,
variant: def,
});
None

View file

@ -143,7 +143,7 @@ impl InferenceContext<'_> {
{
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: true,
private: Some(local_id),
variant: def,
});
}
@ -157,7 +157,7 @@ impl InferenceContext<'_> {
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
private: false,
private: None,
variant: def,
});
self.err_ty()

View file

@ -1604,6 +1604,14 @@ pub(crate) fn impl_self_ty_with_diagnostics_query(
)
}
pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
db: &dyn HirDatabase,
impl_id: ImplId,
) -> (Binders<Ty>, Diagnostics) {
let generics = generics(db, impl_id.into());
(make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
}
pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
db.const_param_ty_with_diagnostics(def).0
}
@ -1633,12 +1641,12 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
(ty, create_diagnostics(ctx.diagnostics))
}
pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
db: &dyn HirDatabase,
impl_id: ImplId,
) -> (Binders<Ty>, Diagnostics) {
let generics = generics(db, impl_id.into());
(make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
pub(crate) fn const_param_ty_with_diagnostics_cycle_result(
_: &dyn HirDatabase,
_: crate::db::HirDatabaseData,
_: ConstParamId,
) -> (Ty, Diagnostics) {
(TyKind::Error.intern(Interner), None)
}
pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {

View file

@ -2182,7 +2182,7 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
if infer.type_mismatches().next().is_some() {
if infer.type_mismatches().next().is_some() || infer.is_erroneous() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);

View file

@ -106,3 +106,256 @@ fn baz() -> i32 {
assert_eq!(format!("{events:?}").matches("infer_shim").count(), 1, "{events:#?}")
}
}
#[test]
fn adding_struct_invalidates_infer() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
$0",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
}
let new_text = "
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
pub struct NewStruct {
field: i32,
}
";
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let actual = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
let expected = vec![
"parse_shim".to_owned(),
"ast_id_map_shim".to_owned(),
"file_item_tree_shim".to_owned(),
"real_span_map_shim".to_owned(),
"crate_local_def_map".to_owned(),
"trait_impls_in_crate_shim".to_owned(),
];
assert_eq!(expected, actual);
}
}
#[test]
fn adding_enum_query_log() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
$0",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
}
let new_text = "
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
pub enum SomeEnum {
A,
B
}
";
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let actual = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
let expected = vec![
"parse_shim".to_owned(),
"ast_id_map_shim".to_owned(),
"file_item_tree_shim".to_owned(),
"real_span_map_shim".to_owned(),
"crate_local_def_map".to_owned(),
"trait_impls_in_crate_shim".to_owned(),
];
assert_eq!(expected, actual);
}
}
#[test]
fn adding_use_query_log() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
$0",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
}
let new_text = "
use std::collections::HashMap;
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
";
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let actual = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
let expected = vec![
"parse_shim".to_owned(),
"ast_id_map_shim".to_owned(),
"file_item_tree_shim".to_owned(),
"real_span_map_shim".to_owned(),
"crate_local_def_map".to_owned(),
"trait_impls_in_crate_shim".to_owned(),
];
assert_eq!(expected, actual);
}
}
#[test]
fn adding_impl_query_log() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
pub struct SomeStruct {
field: i32,
}
$0",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
}
let new_text = "
fn foo() -> i32 {
1 + 1
}
fn bar() -> f32 {
2.0 * 3.0
}
pub struct SomeStruct {
field: i32,
}
impl SomeStruct {
pub fn new(value: i32) -> Self {
Self { field: value }
}
}
";
db.set_file_text(pos.file_id.file_id(&db), new_text);
{
let actual = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
});
let expected = vec![
"parse_shim".to_owned(),
"ast_id_map_shim".to_owned(),
"file_item_tree_shim".to_owned(),
"real_span_map_shim".to_owned(),
"crate_local_def_map".to_owned(),
"trait_impls_in_crate_shim".to_owned(),
"attrs_shim".to_owned(),
"impl_trait_with_diagnostics_shim".to_owned(),
"impl_signature_shim".to_owned(),
"impl_signature_with_source_map_shim".to_owned(),
"impl_self_ty_with_diagnostics_shim".to_owned(),
"struct_signature_shim".to_owned(),
"struct_signature_with_source_map_shim".to_owned(),
"type_for_adt_tracked".to_owned(),
];
assert_eq!(expected, actual);
}
}

View file

@ -2301,3 +2301,51 @@ trait Foo {
"#]],
);
}
#[test]
fn no_panic_on_recursive_const() {
check_infer(
r#"
struct Foo<const N: usize> {}
impl<const N: Foo<N>> Foo<N> {
fn foo(self) {}
}
fn test() {
let _ = N;
}
"#,
expect![[r#"
72..76 'self': Foo<N>
78..80 '{}': ()
94..112 '{ ...= N; }': ()
104..105 '_': {unknown}
108..109 'N': {unknown}
"#]],
);
check_infer(
r#"
struct Foo<const N: usize>;
const N: Foo<N> = Foo;
impl<const N: usize> Foo<N> {
fn foo(self) -> usize {
N
}
}
fn test() {
let _ = N;
}
"#,
expect![[r#"
93..97 'self': Foo<N>
108..125 '{ ... }': usize
118..119 'N': usize
139..157 '{ ...= N; }': ()
149..150 '_': Foo<_>
153..154 'N': Foo<_>
"#]],
);
}

View file

@ -224,7 +224,7 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
pub private: bool,
pub private: Option<Field>,
pub variant: VariantId,
}
@ -648,6 +648,7 @@ impl AnyDiagnostic {
}
ExprOrPatId::PatId(pat) => source_map.pat_field_syntax(pat),
};
let private = private.map(|id| Field { id, parent: variant.into() });
NoSuchField { field: expr_or_pat, private, variant }.into()
}
&InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {

View file

@ -222,6 +222,21 @@ impl<DB: HirDatabase> Semantics<'_, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
// FIXME: Rethink this API
pub fn find_namelike_at_offset_with_descend<'slf>(
&'slf self,
node: &SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = ast::NameLike> + 'slf {
node.token_at_offset(offset)
.map(move |token| self.descend_into_macros_no_opaque(token))
.map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
// re-order the tokens from token_at_offset by yielding the ones with the smaller parent nodes first
// See algo::ancestors_at_offset, which uses the same approach
.kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
.filter_map(ast::NameLike::cast)
}
pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
self.imp.resolve_range_pat(range_pat).map(Struct::from)
}
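The `kmerge_by` call in `find_namelike_at_offset_with_descend` above merges several already-ordered descendant streams so that tokens under the smallest nodes surface first. A self-contained illustration of `itertools::Itertools::kmerge_by` (the numbers are arbitrary stand-ins for text-range lengths):

// Requires the `itertools` crate, which the code above also uses.
use itertools::Itertools;

fn main() {
    // Each inner vector is already sorted by the predicate; kmerge_by lazily
    // merges them into a single stream ordered by that same predicate,
    // smallest element first.
    let streams = vec![vec![1u32, 4, 9], vec![2, 3, 10]];
    let merged: Vec<u32> = streams.into_iter().kmerge_by(|l, r| l < r).collect();
    assert_eq!(merged, [1, 2, 3, 4, 9, 10]);
}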
@ -535,7 +550,7 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
self.with_ctx(|ctx| ctx.has_derives(adt))
self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
}
pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
@ -644,7 +659,7 @@ impl<'db> SemanticsImpl<'db> {
/// Checks if renaming `renamed` to `new_name` may introduce conflicts with other locals,
/// and returns the conflicting locals.
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &str) -> Vec<Local> {
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db);
let starting_expr =
@ -653,7 +668,7 @@ impl<'db> SemanticsImpl<'db> {
body: &body,
conflicts: FxHashSet::default(),
db: self.db,
new_name: Symbol::intern(new_name),
new_name: new_name.symbol().clone(),
old_name: to_be_renamed.name(self.db).symbol().clone(),
owner: to_be_renamed.parent,
to_be_renamed: to_be_renamed.binding_id,
@ -877,8 +892,9 @@ impl<'db> SemanticsImpl<'db> {
if first == last {
// node is just the token, so descend the token
self.descend_into_macros_impl(
self.descend_into_macros_all(
InFile::new(file.file_id, first),
false,
&mut |InFile { value, .. }, _ctx| {
if let Some(node) = value
.parent_ancestors()
@ -887,20 +903,21 @@ impl<'db> SemanticsImpl<'db> {
{
res.push(node)
}
CONTINUE_NO_BREAKS
},
);
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(InFile::new(file.file_id, first), &mut |token, _ctx| {
scratch.push(token);
CONTINUE_NO_BREAKS
});
self.descend_into_macros_all(
InFile::new(file.file_id, first),
false,
&mut |token, _ctx| scratch.push(token),
);
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
self.descend_into_macros_all(
InFile::new(file.file_id, last),
false,
&mut |InFile { value: last, file_id: last_fid }, _ctx| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@ -917,17 +934,18 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
CONTINUE_NO_BREAKS
},
);
}
res
}
pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
// FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
// this is only used on real (not macro) files so this is not a problem.
token.value.parent_ancestors().any(|ancestor| {
/// Returns true if the given input is within a macro call.
///
/// Note that whether this token itself is within the context of a macro expansion does not matter.
/// That is, we strictly check if it lies inside the input of a macro call.
pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
value.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
}
@ -952,7 +970,7 @@ impl<'db> SemanticsImpl<'db> {
ast::Item::Union(it) => it.into(),
_ => return false,
};
ctx.has_derives(token.with_value(&adt))
ctx.file_of_adt_has_derives(token.with_value(&adt))
})
})
}
@ -962,18 +980,18 @@ impl<'db> SemanticsImpl<'db> {
token: SyntaxToken,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
) {
self.descend_into_macros_impl(self.wrap_token_infile(token), &mut |t, ctx| {
cb(t, ctx);
CONTINUE_NO_BREAKS
self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
cb(t, ctx)
});
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
self.descend_into_macros_impl(self.wrap_token_infile(token.clone()), &mut |t, _ctx| {
res.push(t.value);
CONTINUE_NO_BREAKS
});
self.descend_into_macros_all(
self.wrap_token_infile(token.clone()),
false,
&mut |t, _ctx| res.push(t.value),
);
if res.is_empty() {
res.push(token);
}
@ -986,12 +1004,11 @@ impl<'db> SemanticsImpl<'db> {
) -> SmallVec<[InFile<SyntaxToken>; 1]> {
let mut res = smallvec![];
let token = self.wrap_token_infile(token);
self.descend_into_macros_impl(token.clone(), &mut |t, ctx| {
self.descend_into_macros_all(token.clone(), true, &mut |t, ctx| {
if !ctx.is_opaque(self.db) {
// Don't descend into opaque contexts
res.push(t);
}
CONTINUE_NO_BREAKS
});
if res.is_empty() {
res.push(token);
@ -1004,7 +1021,7 @@ impl<'db> SemanticsImpl<'db> {
token: InFile<SyntaxToken>,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
self.descend_into_macros_impl(token, &mut cb)
self.descend_into_macros_impl(token, false, &mut cb)
}
/// Descends the token into expansions, returning the tokens that matches the input
@ -1074,44 +1091,56 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or(token)
}
fn descend_into_macros_all(
&self,
token: InFile<SyntaxToken>,
always_descend_into_derives: bool,
f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
) {
self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
f(tok, ctx);
CONTINUE_NO_BREAKS
});
}
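`descend_into_macros_all` above adapts the `ControlFlow`-driven `descend_into_macros_impl` into an infallible visit-everything helper by always returning `CONTINUE_NO_BREAKS`. A minimal standalone sketch of that adapter shape, with hypothetical names and `Infallible` encoding "never breaks" in the type:

use std::convert::Infallible;
use std::ops::ControlFlow;

// A `Continue` whose `Break` type is uninhabited: a callback returning this
// can never short-circuit the traversal.
const CONTINUE_NO_BREAKS: ControlFlow<Infallible> = ControlFlow::Continue(());

// Stand-in for `descend_into_macros_impl`: the core traversal lets the
// callback break out early with a value.
fn descend_impl<T>(items: &[i32], f: &mut dyn FnMut(i32) -> ControlFlow<T>) -> Option<T> {
    for &it in items {
        if let ControlFlow::Break(b) = f(it) {
            return Some(b);
        }
    }
    None
}

// Stand-in for `descend_into_macros_all`: visits every item, never breaks.
fn descend_all(items: &[i32], f: &mut dyn FnMut(i32)) {
    descend_impl(items, &mut |it| {
        f(it);
        CONTINUE_NO_BREAKS
    });
}

fn main() {
    let mut seen = Vec::new();
    descend_all(&[1, 2, 3], &mut |it| seen.push(it));
    assert_eq!(seen, [1, 2, 3]);
}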
fn descend_into_macros_impl<T>(
&self,
InFile { value: token, file_id }: InFile<SyntaxToken>,
always_descend_into_derives: bool,
f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered();
let span = self.db.span_map(file_id).span_for_range(token.text_range());
let db = self.db;
let span = db.span_map(file_id).span_for_range(token.text_range());
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
Some(
ctx.cache
.get_or_insert_expansion(ctx.db, macro_file)
.map_range_down(span)?
.map(SmallVec::<[_; 2]>::from_iter),
)
})?;
// we have found a mapping for the token if the vec is non-empty
let res = mapped_tokens.is_empty().not().then_some(());
// requeue the tokens we got from mapping our current token down
stack.push((HirFileId::from(file_id), mapped_tokens));
res
};
let process_expansion_for_token =
|ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
let InMacroFile { file_id, value: mapped_tokens } = ctx
.cache
.get_or_insert_expansion(ctx.db, macro_file)
.map_range_down(span)?
.map(SmallVec::<[_; 2]>::from_iter);
// we have found a mapping for the token if the vec is non-empty
let res = mapped_tokens.is_empty().not().then_some(());
// requeue the tokens we got from mapping our current token down
stack.push((HirFileId::from(file_id), mapped_tokens));
res
};
// A stack of tokens to process, along with the file they came from
// These are tracked to know which macro calls we still have to look into
// the tokens themselves aren't that interesting as the span that is being used to map
// things down never changes.
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
let include = file_id.file_id().and_then(|file_id| {
self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id)
});
let include = file_id
.file_id()
.and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
match include {
Some(include) => {
// include! inputs are always from real files, so they only need to be handled once upfront
process_expansion_for_token(&mut stack, include)?;
self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
}
None => {
stack.push((file_id, smallvec![(token, span.ctx)]));
@ -1133,62 +1162,120 @@ impl<'db> SemanticsImpl<'db> {
tokens.reverse();
while let Some((token, ctx)) = tokens.pop() {
let was_not_remapped = (|| {
// First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?;
Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item))
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
invoc_attr_index.ast_index()
}
_ => 0,
};
// FIXME: here, the attribute's text range is used to strip away all
// entries from the start of the attribute "list" up to the invoking
// attribute. But in
// ```
// mod foo {
// #![inner]
// }
// ```
// we don't want to strip away stuff in the `mod foo {` range; that is,
// if the id corresponds to an inner attribute we strip all
// text ranges of the outer ones, and then all of the inner ones up
// to the invoking attribute, so that the in-between is ignored.
let text_range = item.syntax().text_range();
let start = collect_attrs(&item)
.nth(attr_id)
.map(|attr| match attr.1 {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
// First expand into attribute invocations; these need to be handled
// upfront, as any other macro call within will not semantically resolve
// unless also descended.
let res = self.with_ctx(|ctx| {
token
.parent_ancestors()
.filter_map(ast::Item::cast)
// FIXME: This might work incorrectly when we have a derive, followed by
// an attribute on an item, like:
// ```
// #[derive(Debug$0)]
// #[my_attr]
// struct MyStruct;
// ```
// here we should not consider the attribute at all, as our cursor
// technically lies outside of its expansion
.find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?;
ctx.item_to_macro_call(InFile::new(expansion, &item))
.zip(Some(item))
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
return process_expansion_for_token(&mut stack, call_id);
.map(|(call_id, item)| {
let attr_id = match db.lookup_intern_macro_call(call_id).kind {
hir_expand::MacroCallKind::Attr {
invoc_attr_index, ..
} => invoc_attr_index.ast_index(),
_ => 0,
};
// FIXME: here, the attribute's text range is used to strip away all
// entries from the start of the attribute "list" up to the invoking
// attribute. But in
// ```
// mod foo {
// #![inner]
// }
// ```
// we don't want to strip away stuff in the `mod foo {` range; that is,
// if the id corresponds to an inner attribute we strip all
// text ranges of the outer ones, and then all of the inner ones up
// to the invoking attribute, so that the in-between is ignored.
let text_range = item.syntax().text_range();
let start = collect_attrs(&item)
.nth(attr_id)
.map(|attr| match attr.1 {
Either::Left(it) => it.syntax().text_range().start(),
Either::Right(it) => it.syntax().text_range().start(),
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
filter_duplicates(tokens, text_range);
process_expansion_for_token(ctx, &mut stack, call_id)
})
});
if let Some(res) = res {
return res;
}
if always_descend_into_derives {
let res = self.with_ctx(|ctx| {
let (derives, adt) = token
.parent_ancestors()
.filter_map(ast::Adt::cast)
.find_map(|adt| {
Some((
ctx.derive_macro_calls(InFile::new(expansion, &adt))?
.map(|(a, b, c)| (a, b, c.to_owned()))
.collect::<SmallVec<[_; 2]>>(),
adt,
))
})?;
let mut res = None;
for (_, derive_attr, derives) in derives {
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token(
ctx,
&mut stack,
derive_attr,
));
for derive in derives.into_iter().flatten() {
res = res
.or(process_expansion_for_token(ctx, &mut stack, derive));
}
}
// remove all tokens that are within the derives expansion
filter_duplicates(tokens, adt.syntax().text_range());
Some(res)
});
// if we found derives, we can exit early. There is no way we can be in any
// macro call at this point, given we are not in a token tree
if let Some(res) = res {
return res;
}
}
// Then check for token trees; that means we are either in a function-like
// macro or in secondary attribute inputs
let tt = token
.parent_ancestors()
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
match tt {
// function-like macro call
Either::Left(tt) => {
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
}
if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None;
}
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
let mcall = InFile::new(expansion, macro_call);
let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
@ -1201,13 +1288,16 @@ impl<'db> SemanticsImpl<'db> {
let text_range = tt.syntax().text_range();
filter_duplicates(tokens, text_range);
process_expansion_for_token(&mut stack, file_id).or(file_id
.eager_arg(self.db)
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg)
}))
self.with_ctx(|ctx| {
process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
.eager_arg(db)
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(ctx, &mut stack, arg)
}))
})
}
Either::Right(_) if always_descend_into_derives => None,
// derive or derive helper
Either::Right(meta) => {
// attribute we failed expansion for earlier, this might be a derive invocation
@ -1216,31 +1306,33 @@ impl<'db> SemanticsImpl<'db> {
let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
Some(adt) => {
// this might be a derive on an ADT
let derive_call = self.with_ctx(|ctx| {
let res = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
InFile::new(expansion, &adt),
InFile::new(expansion, attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
let derive_call = ctx
.attr_to_derive_macro_call(
InFile::new(expansion, &adt),
InFile::new(expansion, attr.clone()),
)?
.1;
match derive_call {
Some(call_id) => {
// resolved to a derive
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|(t, _)| {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
&mut stack, call_id,
);
}
None => Some(adt),
// resolved to a derive
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|(t, _)| {
!text_range.contains_range(t.text_range())
});
Some(process_expansion_for_token(
ctx,
&mut stack,
derive_call,
))
});
if let Some(res) = res {
return res;
}
Some(adt)
}
None => {
// Otherwise this could be a derive helper on a variant or field
@ -1254,12 +1346,9 @@ impl<'db> SemanticsImpl<'db> {
)
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) {
return None;
}
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
// Not an attribute, nor a derive, so it's either an intert attribute or a derive helper
// Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
// Try to resolve to a derive helper and downmap
let resolver = &token
.parent()
@ -1267,7 +1356,7 @@ impl<'db> SemanticsImpl<'db> {
self.analyze_impl(InFile::new(expansion, &parent), None, false)
})?
.resolver;
let id = self.db.ast_id_map(expansion).ast_id(&adt);
let id = db.ast_id_map(expansion).ast_id(&adt);
let helpers = resolver
.def_map()
.derive_helpers_in_scope(InFile::new(expansion, id))?;
@ -1278,20 +1367,22 @@ impl<'db> SemanticsImpl<'db> {
}
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token(&mut stack, *derive));
}
res
self.with_ctx(|ctx| {
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
// as there may be multiple derives registering the same helper
// name, we gotta make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res
.or(process_expansion_for_token(ctx, &mut stack, *derive));
}
res
})
}
}
})()
.is_none();
if was_not_remapped {
if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
return Some(b);
@ -1380,25 +1471,31 @@ impl<'db> SemanticsImpl<'db> {
}
/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
// FIXME: Replace with `ancestors_with_macros_file` when all usages are updated.
pub fn ancestors_with_macros(
&self,
node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node);
iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
match value.parent() {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
let macro_file = file_id.macro_file()?;
self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
}
self.with_ctx(|ctx| {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
expansion_info.arg().map(|node| node?.parent()).transpose()
})
}
/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
pub fn ancestors_with_macros_file(
&self,
node: InFile<SyntaxNode>,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
let macro_file = file_id.macro_file()?;
self.with_ctx(|ctx| {
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
expansion_info.arg().map(|node| node?.parent()).transpose()
})
}
})
.map(|it| it.value)
}
pub fn ancestors_at_offset_with_macros(
@ -1851,18 +1948,12 @@ impl<'db> SemanticsImpl<'db> {
ChildContainer::TraitId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::TraitAliasId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::ImplId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::EnumId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::TypeAliasId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::GenericDefId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
}

View file

@ -17,6 +17,7 @@ use hir_def::{
DynMap,
keys::{self, Key},
},
hir::generics::GenericParams,
item_scope::ItemScope,
item_tree::ItemTreeNode,
nameres::DefMap,
@ -49,6 +50,12 @@ impl ChildBySource for TraitId {
data.items.iter().for_each(|&(_, item)| {
add_assoc_item(db, res, file_id, item);
});
let (_, source_map) = db.trait_signature_with_source_map(*self);
source_map.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(
|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id);
},
);
}
}
@ -68,6 +75,12 @@ impl ChildBySource for ImplId {
data.items.iter().for_each(|&(_, item)| {
add_assoc_item(db, res, file_id, item);
});
let (_, source_map) = db.impl_signature_with_source_map(*self);
source_map.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(
|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id);
},
);
}
}
@ -178,6 +191,8 @@ impl ChildBySource for VariantId {
Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
let (_, sm) = db.variant_fields_with_source_map(*self);
sm.expansions().for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
@ -195,6 +210,11 @@ impl ChildBySource for EnumId {
res[keys::ENUM_VARIANT]
.insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
});
let (_, source_map) = db.enum_signature_with_source_map(*self);
source_map
.expansions()
.filter(|(ast, _)| ast.file_id == file_id)
.for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
@ -225,7 +245,8 @@ impl ChildBySource for GenericDefId {
return;
}
let generic_params = db.generic_params(*self);
let (generic_params, _, source_map) =
GenericParams::generic_params_and_store_and_source_map(db, *self);
let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx);
let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx);
@ -253,6 +274,11 @@ impl ChildBySource for GenericDefId {
res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
}
}
source_map
.expansions()
.filter(|(ast, _)| ast.file_id == file_id)
.for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}

View file

@ -108,7 +108,7 @@ use span::FileId;
use stdx::impl_from;
use syntax::{
AstNode, AstPtr, SyntaxNode,
ast::{self, HasName},
ast::{self, HasAttrs, HasName},
};
use tt::TextRange;
@ -411,10 +411,25 @@ impl SourceToDefCtx<'_, '_> {
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
}
pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
// FIXME: Make this more fine-grained! This should be an `adt_has_derives`!
pub(super) fn file_of_adt_has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
self.dyn_map(adt).as_ref().is_some_and(|map| !map[keys::DERIVE_MACRO_CALL].is_empty())
}
pub(super) fn derive_macro_calls<'slf>(
&'slf mut self,
adt: InFile<&ast::Adt>,
) -> Option<impl Iterator<Item = (AttrId, MacroCallId, &'slf [Option<MacroCallId>])> + use<'slf>>
{
self.dyn_map(adt).as_ref().map(|&map| {
let dyn_map = &map[keys::DERIVE_MACRO_CALL];
adt.value
.attrs()
.filter_map(move |attr| dyn_map.get(&AstPtr::new(&attr)))
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
})
}
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
&mut self,
src: InFile<&Ast>,
@ -616,14 +631,14 @@ impl SourceToDefCtx<'_, '_> {
match &item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
ast::Item::TraitAlias(it) => {
self.trait_alias_to_def(container.with_value(it))?.into()
}
ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
ast::Item::TypeAlias(it) => {
self.type_alias_to_def(container.with_value(it))?.into()
}
ast::Item::TypeAlias(it) => ChildContainer::GenericDefId(
self.type_alias_to_def(container.with_value(it))?.into(),
),
ast::Item::TraitAlias(it) => ChildContainer::GenericDefId(
self.trait_alias_to_def(container.with_value(it))?.into(),
),
ast::Item::Struct(it) => {
let def = self.struct_to_def(container.with_value(it))?;
let is_in_body = it.field_list().is_some_and(|it| {
@ -723,11 +738,9 @@ pub(crate) enum ChildContainer {
DefWithBodyId(DefWithBodyId),
ModuleId(ModuleId),
TraitId(TraitId),
TraitAliasId(TraitAliasId),
ImplId(ImplId),
EnumId(EnumId),
VariantId(VariantId),
TypeAliasId(TypeAliasId),
/// XXX: this might be the same def as, for example an `EnumId`. However,
/// here the children are generic parameters, and not, eg enum variants.
GenericDefId(GenericDefId),
@ -736,11 +749,9 @@ impl_from! {
DefWithBodyId,
ModuleId,
TraitId,
TraitAliasId,
ImplId,
EnumId,
VariantId,
TypeAliasId,
GenericDefId
for ChildContainer
}
@ -752,11 +763,9 @@ impl ChildContainer {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
ChildContainer::TraitAliasId(_) => DynMap::default(),
ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
ChildContainer::TypeAliasId(_) => DynMap::default(),
ChildContainer::GenericDefId(it) => it.child_by_source(db, file_id),
}
}

View file

@ -56,14 +56,22 @@ pub(crate) fn convert_named_struct_to_tuple_struct(
// XXX: We don't currently provide this assist for struct definitions inside macros, but if we
// are to lift this limitation, don't forget to make `edit_struct_def()` consider macro files
// too.
let name = ctx.find_node_at_offset::<ast::Name>()?;
let strukt = name.syntax().parent().and_then(<Either<ast::Struct, ast::Variant>>::cast)?;
let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
let strukt_or_variant = ctx
.find_node_at_offset::<ast::Struct>()
.map(Either::Left)
.or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
let field_list = strukt_or_variant.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
if ctx.offset() > field_list.syntax().text_range().start() {
// Assist could be distracting after the braces
return None;
}
let record_fields = match field_list {
ast::FieldList::RecordFieldList(it) => it,
ast::FieldList::TupleFieldList(_) => return None,
};
let strukt_def = match &strukt {
let strukt_def = match &strukt_or_variant {
Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
};
@ -71,11 +79,11 @@ pub(crate) fn convert_named_struct_to_tuple_struct(
acc.add(
AssistId::refactor_rewrite("convert_named_struct_to_tuple_struct"),
"Convert to tuple struct",
strukt.syntax().text_range(),
strukt_or_variant.syntax().text_range(),
|edit| {
edit_field_references(ctx, edit, record_fields.fields());
edit_struct_references(ctx, edit, strukt_def);
edit_struct_def(ctx, edit, &strukt, record_fields);
edit_struct_def(ctx, edit, &strukt_or_variant, record_fields);
},
)
}
@ -277,6 +285,88 @@ impl A {
struct Inner;
struct A(Inner);
impl A {
fn new(inner: Inner) -> A {
A(inner)
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.0
}
}"#,
);
}
#[test]
fn convert_simple_struct_cursor_on_struct_keyword() {
check_assist(
convert_named_struct_to_tuple_struct,
r#"
struct Inner;
struct$0 A { inner: Inner }
impl A {
fn new(inner: Inner) -> A {
A { inner }
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.inner
}
}"#,
r#"
struct Inner;
struct A(Inner);
impl A {
fn new(inner: Inner) -> A {
A(inner)
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.0
}
}"#,
);
}
#[test]
fn convert_simple_struct_cursor_on_visibility_keyword() {
check_assist(
convert_named_struct_to_tuple_struct,
r#"
struct Inner;
pub$0 struct A { inner: Inner }
impl A {
fn new(inner: Inner) -> A {
A { inner }
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.inner
}
}"#,
r#"
struct Inner;
pub struct A(Inner);
impl A {
fn new(inner: Inner) -> A {
A(inner)

View file

@ -51,18 +51,26 @@ pub(crate) fn convert_tuple_struct_to_named_struct(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
let name = ctx.find_node_at_offset::<ast::Name>()?;
let strukt = name.syntax().parent().and_then(<Either<ast::Struct, ast::Variant>>::cast)?;
let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
let strukt_or_variant = ctx
.find_node_at_offset::<ast::Struct>()
.map(Either::Left)
.or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
let field_list = strukt_or_variant.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
if ctx.offset() > field_list.syntax().text_range().start() {
// Assist could be distracting after the braces
return None;
}
let tuple_fields = match field_list {
ast::FieldList::TupleFieldList(it) => it,
ast::FieldList::RecordFieldList(_) => return None,
};
let strukt_def = match &strukt {
let strukt_def = match &strukt_or_variant {
Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
};
let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
let target = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
acc.add(
AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"),
@ -72,7 +80,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct(
let names = generate_names(tuple_fields.fields());
edit_field_references(ctx, edit, tuple_fields.fields(), &names);
edit_struct_references(ctx, edit, strukt_def, &names);
edit_struct_def(ctx, edit, &strukt, tuple_fields, names);
edit_struct_def(ctx, edit, &strukt_or_variant, tuple_fields, names);
},
)
}
@ -300,6 +308,88 @@ impl A {
struct Inner;
struct A { field1: Inner }
impl A {
fn new(inner: Inner) -> A {
A { field1: inner }
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.field1
}
}"#,
);
}
#[test]
fn convert_simple_struct_cursor_on_struct_keyword() {
check_assist(
convert_tuple_struct_to_named_struct,
r#"
struct Inner;
struct$0 A(Inner);
impl A {
fn new(inner: Inner) -> A {
A(inner)
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.0
}
}"#,
r#"
struct Inner;
struct A { field1: Inner }
impl A {
fn new(inner: Inner) -> A {
A { field1: inner }
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.field1
}
}"#,
);
}
#[test]
fn convert_simple_struct_cursor_on_visibility_keyword() {
check_assist(
convert_tuple_struct_to_named_struct,
r#"
struct Inner;
pub$0 struct A(Inner);
impl A {
fn new(inner: Inner) -> A {
A(inner)
}
fn new_with_default() -> A {
A::new(Inner)
}
fn into_inner(self) -> Inner {
self.0
}
}"#,
r#"
struct Inner;
pub struct A { field1: Inner }
impl A {
fn new(inner: Inner) -> A {
A { field1: inner }

View file

@ -7,10 +7,10 @@ use syntax::{
use crate::{AssistContext, AssistId, Assists};
// FIXME: this really should be a fix for diagnostic, rather than an assist.
// Assist: fix_visibility
//
// Note that there is some duplication between this and the no_such_field diagnostic.
//
// Makes inaccessible item public.
//
// ```
@ -32,7 +32,6 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
add_vis_to_referenced_module_def(acc, ctx)
.or_else(|| add_vis_to_referenced_record_field(acc, ctx))
}
fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
@ -88,59 +87,6 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
})
}
fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
let current_module = ctx.sema.scope(record_field.syntax())?.module();
let current_edition = current_module.krate().edition(ctx.db());
let visibility = record_field_def.visibility(ctx.db());
if visibility.is_visible_from(ctx.db(), current_module.into()) {
return None;
}
let parent = record_field_def.parent_def(ctx.db());
let parent_name = parent.name(ctx.db());
let target_module = parent.module(ctx.db());
let in_file_source = record_field_def.source(ctx.db())?;
let (vis_owner, target) = match in_file_source.value {
hir::FieldSource::Named(it) => {
let range = it.syntax().text_range();
(ast::AnyHasVisibility::new(it), range)
}
hir::FieldSource::Pos(it) => {
let range = it.syntax().text_range();
(ast::AnyHasVisibility::new(it), range)
}
};
let missing_visibility = if current_module.krate() == target_module.krate() {
make::visibility_pub_crate()
} else {
make::visibility_pub()
};
let target_file = in_file_source.file_id.original_file(ctx.db());
let target_name = record_field_def.name(ctx.db());
let assist_label = format!(
"Change visibility of {}.{} to {missing_visibility}",
parent_name.display(ctx.db(), current_edition),
target_name.display(ctx.db(), current_edition)
);
acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
edit.edit_file(target_file.file_id(ctx.db()));
let vis_owner = edit.make_mut(vis_owner);
vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
edit.add_tabstop_before(cap, vis);
}
})
}
fn target_data_for_def(
db: &dyn HirDatabase,
def: hir::ModuleDef,
@ -293,44 +239,6 @@ struct Foo;
);
}
#[test]
fn fix_visibility_of_struct_field() {
check_assist(
fix_visibility,
r"mod foo { pub struct Foo { bar: (), } }
fn main() { foo::Foo { $0bar: () }; } ",
r"mod foo { pub struct Foo { $0pub(crate) bar: (), } }
fn main() { foo::Foo { bar: () }; } ",
);
check_assist(
fix_visibility,
r"
//- /lib.rs
mod foo;
fn main() { foo::Foo { $0bar: () }; }
//- /foo.rs
pub struct Foo { bar: () }
",
r"pub struct Foo { $0pub(crate) bar: () }
",
);
check_assist_not_applicable(
fix_visibility,
r"mod foo { pub struct Foo { pub bar: (), } }
fn main() { foo::Foo { $0bar: () }; } ",
);
check_assist_not_applicable(
fix_visibility,
r"
//- /lib.rs
mod foo;
fn main() { foo::Foo { $0bar: () }; }
//- /foo.rs
pub struct Foo { pub bar: () }
",
);
}
#[test]
fn fix_visibility_of_enum_variant_field() {
// Enum variants, as well as their fields, always get the enum's visibility. In fact, rustc
@ -367,44 +275,6 @@ pub struct Foo { pub bar: () }
);
}
#[test]
fn fix_visibility_of_union_field() {
check_assist(
fix_visibility,
r"mod foo { pub union Foo { bar: (), } }
fn main() { foo::Foo { $0bar: () }; } ",
r"mod foo { pub union Foo { $0pub(crate) bar: (), } }
fn main() { foo::Foo { bar: () }; } ",
);
check_assist(
fix_visibility,
r"
//- /lib.rs
mod foo;
fn main() { foo::Foo { $0bar: () }; }
//- /foo.rs
pub union Foo { bar: () }
",
r"pub union Foo { $0pub(crate) bar: () }
",
);
check_assist_not_applicable(
fix_visibility,
r"mod foo { pub union Foo { pub bar: (), } }
fn main() { foo::Foo { $0bar: () }; } ",
);
check_assist_not_applicable(
fix_visibility,
r"
//- /lib.rs
mod foo;
fn main() { foo::Foo { $0bar: () }; }
//- /foo.rs
pub union Foo { pub bar: () }
",
);
}
#[test]
fn fix_visibility_of_const() {
check_assist(
@ -570,19 +440,6 @@ foo::Bar$0
pub(crate) struct Bar;
",
r"$0pub struct Bar;
",
);
check_assist(
fix_visibility,
r"
//- /main.rs crate:a deps:foo
fn main() {
foo::Foo { $0bar: () };
}
//- /lib.rs crate:foo
pub struct Foo { pub(crate) bar: () }
",
r"pub struct Foo { $0pub bar: () }
",
);
}

View file

@ -1,6 +1,7 @@
use ide_db::{
assists::AssistId,
defs::{Definition, NameClass, NameRefClass},
rename::RenameDefinition,
};
use syntax::{AstNode, ast};
@ -61,7 +62,7 @@ pub(crate) fn remove_underscore(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
"Remove underscore from a used variable",
text_range,
|builder| {
let changes = def.rename(&ctx.sema, new_name).unwrap();
let changes = def.rename(&ctx.sema, new_name, RenameDefinition::Yes).unwrap();
builder.source_change = changes;
},
)

View file

@ -25,6 +25,7 @@ use crate::{
mod cfg;
mod derive;
mod diagnostic;
mod lint;
mod macro_use;
mod repr;
@ -40,23 +41,22 @@ pub(crate) fn complete_known_attribute_input(
extern_crate: Option<&ast::ExternCrate>,
) -> Option<()> {
let attribute = fake_attribute_under_caret;
let name_ref = match attribute.path() {
Some(p) => Some(p.as_single_name_ref()?),
None => None,
};
let (path, tt) = name_ref.zip(attribute.token_tree())?;
tt.l_paren_token()?;
let path = attribute.path()?;
let segments = path.segments().map(|s| s.name_ref()).collect::<Option<Vec<_>>>()?;
let segments = segments.iter().map(|n| n.text()).collect::<Vec<_>>();
let segments = segments.iter().map(|t| t.as_str()).collect::<Vec<_>>();
let tt = attribute.token_tree()?;
match path.text().as_str() {
"repr" => repr::complete_repr(acc, ctx, tt),
"feature" => lint::complete_lint(
match segments.as_slice() {
["repr"] => repr::complete_repr(acc, ctx, tt),
["feature"] => lint::complete_lint(
acc,
ctx,
colon_prefix,
&parse_tt_as_comma_sep_paths(tt, ctx.edition)?,
FEATURES,
),
"allow" | "expect" | "deny" | "forbid" | "warn" => {
["allow"] | ["expect"] | ["deny"] | ["forbid"] | ["warn"] => {
let existing_lints = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;
let lints: Vec<Lint> = CLIPPY_LINT_GROUPS
@ -70,13 +70,14 @@ pub(crate) fn complete_known_attribute_input(
lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
}
"cfg" => cfg::complete_cfg(acc, ctx),
"macro_use" => macro_use::complete_macro_use(
["cfg"] => cfg::complete_cfg(acc, ctx),
["macro_use"] => macro_use::complete_macro_use(
acc,
ctx,
extern_crate,
&parse_tt_as_comma_sep_paths(tt, ctx.edition)?,
),
["diagnostic", "on_unimplemented"] => diagnostic::complete_on_unimplemented(acc, ctx, tt),
_ => (),
}
Some(())
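The rewrite of `complete_known_attribute_input` above matches the attribute path as a slice of segment names instead of one flat string, which lets multi-segment paths such as `diagnostic::on_unimplemented` get their own arm. A standalone sketch of slice-pattern dispatch (names illustrative only):

fn dispatch(segments: &[&str]) -> &'static str {
    // Slice patterns distinguish single-segment and two-segment paths directly.
    match segments {
        ["repr"] => "repr completion",
        ["allow"] | ["expect"] | ["deny"] | ["forbid"] | ["warn"] => "lint completion",
        ["diagnostic", "on_unimplemented"] => "on_unimplemented completion",
        _ => "no completion",
    }
}

fn main() {
    assert_eq!(dispatch(&["diagnostic", "on_unimplemented"]), "on_unimplemented completion");
    assert_eq!(dispatch(&["unknown"]), "no completion");
}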
@ -139,6 +140,8 @@ pub(crate) fn complete_attribute_path(
}
Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
}
let qualifier_path =
if let Qualified::With { path, .. } = qualified { Some(path) } else { None };
let attributes = annotated_item_kind.and_then(|kind| {
if ast::Expr::can_cast(kind) {
@ -149,18 +152,33 @@ pub(crate) fn complete_attribute_path(
});
let add_completion = |attr_completion: &AttrCompletion| {
let mut item = CompletionItem::new(
SymbolKind::Attribute,
ctx.source_range(),
attr_completion.label,
ctx.edition,
);
// if we don't already have the qualifiers of the completion, then
// add the missing parts to the label and snippet
let mut label = attr_completion.label.to_owned();
let mut snippet = attr_completion.snippet.map(|s| s.to_owned());
let segments = qualifier_path.iter().flat_map(|q| q.segments()).collect::<Vec<_>>();
let qualifiers = attr_completion.qualifiers;
let matching_qualifiers = segments
.iter()
.zip(qualifiers)
.take_while(|(s, q)| s.name_ref().is_some_and(|t| t.text() == **q))
.count();
if matching_qualifiers != qualifiers.len() {
let prefix = qualifiers[matching_qualifiers..].join("::");
label = format!("{prefix}::{label}");
if let Some(s) = snippet.as_mut() {
*s = format!("{prefix}::{s}");
}
}
let mut item =
CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
if let Some(lookup) = attr_completion.lookup {
item.lookup_by(lookup);
}
if let Some((snippet, cap)) = attr_completion.snippet.zip(ctx.config.snippet_cap) {
if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
item.insert_snippet(cap, snippet);
}
@ -184,6 +202,7 @@ struct AttrCompletion {
label: &'static str,
lookup: Option<&'static str>,
snippet: Option<&'static str>,
qualifiers: &'static [&'static str],
prefer_inner: bool,
}
@ -192,6 +211,10 @@ impl AttrCompletion {
self.lookup.unwrap_or(self.label)
}
const fn qualifiers(self, qualifiers: &'static [&'static str]) -> AttrCompletion {
AttrCompletion { qualifiers, ..self }
}
const fn prefer_inner(self) -> AttrCompletion {
AttrCompletion { prefer_inner: true, ..self }
}
@ -202,7 +225,7 @@ const fn attr(
lookup: Option<&'static str>,
snippet: Option<&'static str>,
) -> AttrCompletion {
AttrCompletion { label, lookup, snippet, prefer_inner: false }
AttrCompletion { label, lookup, snippet, qualifiers: &[], prefer_inner: false }
}
macro_rules! attrs {
@ -264,14 +287,14 @@ static KIND_TO_ATTRIBUTES: LazyLock<FxHashMap<SyntaxKind, &[&str]>> = LazyLock::
FN,
attrs!(
item, linkable,
"cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
"cold", "ignore", "inline", "panic_handler", "proc_macro",
"proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature",
"test", "track_caller"
),
),
(STATIC, attrs!(item, linkable, "global_allocator", "used")),
(TRAIT, attrs!(item, "must_use")),
(IMPL, attrs!(item, "automatically_derived")),
(TRAIT, attrs!(item, "diagnostic::on_unimplemented")),
(IMPL, attrs!(item, "automatically_derived", "diagnostic::do_not_recommend")),
(ASSOC_ITEM_LIST, attrs!(item)),
(EXTERN_BLOCK, attrs!(item, "link")),
(EXTERN_ITEM_LIST, attrs!(item, "link")),
@ -311,6 +334,14 @@ const ATTRIBUTES: &[AttrCompletion] = &[
attr("deny(…)", Some("deny"), Some("deny(${0:lint})")),
attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)),
attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)),
attr("do_not_recommend", Some("diagnostic::do_not_recommend"), None)
.qualifiers(&["diagnostic"]),
attr(
"on_unimplemented",
Some("diagnostic::on_unimplemented"),
Some(r#"on_unimplemented(${0:keys})"#),
)
.qualifiers(&["diagnostic"]),
attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),

View file

@ -0,0 +1,60 @@
//! Completion for diagnostic attributes.
use ide_db::SymbolKind;
use syntax::ast;
use crate::{CompletionItem, Completions, context::CompletionContext};
use super::AttrCompletion;
pub(super) fn complete_on_unimplemented(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
input: ast::TokenTree,
) {
if let Some(existing_keys) = super::parse_comma_sep_expr(input) {
for attr in ATTRIBUTE_ARGS {
let already_annotated = existing_keys
.iter()
.filter_map(|expr| match expr {
ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
ast::Expr::BinExpr(bin)
if bin.op_kind() == Some(ast::BinaryOp::Assignment { op: None }) =>
{
match bin.lhs()? {
ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
_ => None,
}
}
_ => None,
})
.any(|it| {
let text = it.text();
attr.key() == text && text != "note"
});
if already_annotated {
continue;
}
let mut item = CompletionItem::new(
SymbolKind::BuiltinAttr,
ctx.source_range(),
attr.label,
ctx.edition,
);
if let Some(lookup) = attr.lookup {
item.lookup_by(lookup);
}
if let Some((snippet, cap)) = attr.snippet.zip(ctx.config.snippet_cap) {
item.insert_snippet(cap, snippet);
}
item.add_to(acc, ctx.db);
}
}
}
const ATTRIBUTE_ARGS: &[AttrCompletion] = &[
super::attr(r#"label = "…""#, Some("label"), Some(r#"label = "${0:label}""#)),
super::attr(r#"message = "…""#, Some("message"), Some(r#"message = "${0:message}""#)),
super::attr(r#"note = "…""#, Some("note"), Some(r#"note = "${0:note}""#)),
];
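The `already_annotated` check above suppresses keys that are already present in the attribute input, with `note` exempted because `on_unimplemented` accepts it repeatedly (the expect-tests further down rely on this). A standalone sketch of that filter rule, using a hypothetical helper:

fn offer(candidates: &[&'static str], existing: &[&'static str]) -> Vec<&'static str> {
    candidates
        .iter()
        .copied()
        // A key is offered unless already present; `note` is always offered.
        .filter(|key| *key == "note" || !existing.contains(key))
        .collect()
}

fn main() {
    let offered = offer(&["label", "message", "note"], &["message", "note"]);
    assert_eq!(offered, ["label", "note"]);
}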

View file

@ -30,6 +30,8 @@ pub struct Foo(#[m$0] i32);
at deprecated
at derive macro derive
at derive()
at diagnostic::do_not_recommend
at diagnostic::on_unimplemented
at doc = ""
at doc(alias = "")
at doc(hidden)
@ -472,13 +474,13 @@ fn attr_on_trait() {
at cfg_attr()
at deny()
at deprecated
at diagnostic::on_unimplemented
at doc = ""
at doc(alias = "")
at doc(hidden)
at expect()
at forbid()
at must_use
at must_use
at no_mangle
at warn()
kw crate::
@ -498,6 +500,7 @@ fn attr_on_impl() {
at cfg_attr()
at deny()
at deprecated
at diagnostic::do_not_recommend
at doc = ""
at doc(alias = "")
at doc(hidden)
@ -532,6 +535,76 @@ fn attr_on_impl() {
);
}
#[test]
fn attr_with_qualifier() {
check(
r#"#[diagnostic::$0] impl () {}"#,
expect![[r#"
at allow()
at automatically_derived
at cfg()
at cfg_attr()
at deny()
at deprecated
at do_not_recommend
at doc = ""
at doc(alias = "")
at doc(hidden)
at expect()
at forbid()
at must_use
at no_mangle
at warn()
"#]],
);
check(
r#"#[diagnostic::$0] trait Foo {}"#,
expect![[r#"
at allow()
at cfg()
at cfg_attr()
at deny()
at deprecated
at doc = ""
at doc(alias = "")
at doc(hidden)
at expect()
at forbid()
at must_use
at no_mangle
at on_unimplemented
at warn()
"#]],
);
}
#[test]
fn attr_diagnostic_on_unimplemented() {
check(
r#"#[diagnostic::on_unimplemented($0)] trait Foo {}"#,
expect![[r#"
ba label = ""
ba message = ""
ba note = ""
"#]],
);
check(
r#"#[diagnostic::on_unimplemented(message = "foo", $0)] trait Foo {}"#,
expect![[r#"
ba label = ""
ba note = ""
"#]],
);
check(
r#"#[diagnostic::on_unimplemented(note = "foo", $0)] trait Foo {}"#,
expect![[r#"
ba label = ""
ba message = ""
ba note = ""
"#]],
);
}
#[test]
fn attr_on_extern_block() {
check(
@ -619,7 +692,6 @@ fn attr_on_fn() {
at link_name = ""
at link_section = ""
at must_use
at must_use
at no_mangle
at panic_handler
at proc_macro
@ -649,6 +721,8 @@ fn attr_in_source_file_end() {
at deny()
at deprecated
at derive()
at diagnostic::do_not_recommend
at diagnostic::on_unimplemented
at doc = ""
at doc(alias = "")
at doc(hidden)

View file

@ -4451,20 +4451,6 @@ The tracking issue for this feature is: [#133214]
[#133214]: https://github.com/rust-lang/rust/issues/133214
------------------------
"##,
default_severity: Severity::Allow,
warn_since: None,
deny_since: None,
},
Lint {
label: "const_eq_ignore_ascii_case",
description: r##"# `const_eq_ignore_ascii_case`
The tracking issue for this feature is: [#131719]
[#131719]: https://github.com/rust-lang/rust/issues/131719
------------------------
"##,
default_severity: Severity::Allow,

View file

@ -20,7 +20,7 @@
//!
//! The correct behavior in such cases is probably to show a dialog to the user.
//! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt;
use std::fmt::{self, Display};
use crate::{
source_change::ChangeAnnotation,
@ -28,13 +28,12 @@ use crate::{
};
use base_db::AnchoredPathBuf;
use either::Either;
use hir::{EditionedFileId, FieldSource, FileRange, InFile, ModuleSource, Semantics};
use hir::{FieldSource, FileRange, InFile, ModuleSource, Name, Semantics, sym};
use span::{Edition, FileId, SyntaxContext};
use stdx::{TupleExt, never};
use syntax::{
AstNode, SyntaxKind, T, TextRange,
ast::{self, HasName},
utils::is_raw_identifier,
};
use crate::{
@ -70,26 +69,33 @@ macro_rules! _bail {
}
pub use _bail as bail;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum RenameDefinition {
Yes,
No,
}
impl Definition {
pub fn rename(
&self,
sema: &Semantics<'_, RootDatabase>,
new_name: &str,
rename_definition: RenameDefinition,
) -> Result<SourceChange> {
// Strip any `r#` prefix here; `Name` re-appends it on display when needed.
let new_name = new_name.trim_start_matches("r#");
// self.krate() returns None if
// self is a built-in attr, built-in type or tool module.
// Renaming such defs is not allowed;
// the cases where self.krate() is None are handled below.
if let Some(krate) = self.krate(sema.db) {
let edition = if let Some(krate) = self.krate(sema.db) {
// Non-local items cannot be renamed,
// so bail if the definition's crate is not local.
if !krate.origin(sema.db).is_local() {
bail!("Cannot rename a non-local definition")
}
}
krate.edition(sema.db)
} else {
Edition::LATEST
};
match *self {
Definition::Module(module) => rename_mod(sema, module, new_name),
@ -103,8 +109,10 @@ impl Definition {
bail!("Cannot rename a builtin attr.")
}
Definition::SelfType(_) => bail!("Cannot rename `Self`"),
Definition::Macro(mac) => rename_reference(sema, Definition::Macro(mac), new_name),
def => rename_reference(sema, def, new_name),
Definition::Macro(mac) => {
rename_reference(sema, Definition::Macro(mac), new_name, rename_definition, edition)
}
def => rename_reference(sema, def, new_name, rename_definition, edition),
}
}
@ -237,10 +245,6 @@ fn rename_mod(
module: hir::Module,
new_name: &str,
) -> Result<SourceChange> {
if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
}
let mut source_change = SourceChange::default();
if module.is_crate_root() {
@ -248,6 +252,14 @@ fn rename_mod(
}
let InFile { file_id, value: def_source } = module.definition_source(sema.db);
let edition = file_id.edition(sema.db);
let (new_name, kind) = IdentifierKind::classify(edition, new_name)?;
if kind != IdentifierKind::Ident {
bail!(
"Invalid name `{0}`: cannot rename module to {0}",
new_name.display(sema.db, edition)
);
}
if let ModuleSource::SourceFile(..) = def_source {
let anchor = file_id.original_file(sema.db).file_id(sema.db);
@ -256,7 +268,7 @@ fn rename_mod(
// Module exists in a named file
if !is_mod_rs {
let path = format!("{new_name}.rs");
let path = format!("{}.rs", new_name.as_str());
let dst = AnchoredPathBuf { anchor, path };
source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst })
}
@ -267,11 +279,11 @@ fn rename_mod(
let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
// Go up one level since the anchor is inside the dir we're trying to rename
(true, _, Some(mod_name)) => {
Some((format!("../{}", mod_name.as_str()), format!("../{new_name}")))
Some((format!("../{}", mod_name.as_str()), format!("../{}", new_name.as_str())))
}
// The anchor is on the same level as target dir
(false, true, Some(mod_name)) => {
Some((mod_name.as_str().to_owned(), new_name.to_owned()))
Some((mod_name.as_str().to_owned(), new_name.as_str().to_owned()))
}
_ => None,
};
@ -296,11 +308,7 @@ fn rename_mod(
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
let new_name = if is_raw_identifier(new_name, file_id.edition(sema.db)) {
format!("r#{new_name}")
} else {
new_name.to_owned()
};
let new_name = new_name.display(sema.db, edition).to_string();
source_change.insert_source_edit(
file_id.file_id(sema.db),
TextEdit::replace(file_range.range, new_name),
@ -314,9 +322,10 @@ fn rename_mod(
let def = Definition::Module(module);
let usages = def.usages(sema).all();
let ref_edits = usages.iter().map(|(file_id, references)| {
let edition = file_id.edition(sema.db);
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
source_edit_from_references(sema.db, references, def, &new_name, edition),
)
});
source_change.extend(ref_edits);
@ -328,8 +337,10 @@ fn rename_reference(
sema: &Semantics<'_, RootDatabase>,
def: Definition,
new_name: &str,
rename_definition: RenameDefinition,
edition: Edition,
) -> Result<SourceChange> {
let ident_kind = IdentifierKind::classify(new_name)?;
let (mut new_name, ident_kind) = IdentifierKind::classify(edition, new_name)?;
if matches!(
def,
@ -337,18 +348,34 @@ fn rename_reference(
) {
match ident_kind {
IdentifierKind::Underscore => {
bail!("Invalid name `{}`: not a lifetime identifier", new_name);
bail!(
"Invalid name `{}`: not a lifetime identifier",
new_name.display(sema.db, edition)
);
}
_ => cov_mark::hit!(rename_lifetime),
IdentifierKind::Ident => {
new_name = Name::new_lifetime(&format!("'{}", new_name.as_str()))
}
IdentifierKind::Lifetime => (),
IdentifierKind::LowercaseSelf => bail!(
"Invalid name `{}`: not a lifetime identifier",
new_name.display(sema.db, edition)
),
}
} else {
match ident_kind {
IdentifierKind::Lifetime => {
cov_mark::hit!(rename_not_an_ident_ref);
bail!("Invalid name `{}`: not an identifier", new_name);
bail!("Invalid name `{}`: not an identifier", new_name.display(sema.db, edition));
}
IdentifierKind::Ident => cov_mark::hit!(rename_non_local),
IdentifierKind::Underscore => (),
IdentifierKind::LowercaseSelf => {
bail!(
"Invalid name `{}`: cannot rename to `self`",
new_name.display(sema.db, edition)
);
}
}
}
@ -361,30 +388,29 @@ fn rename_reference(
}
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(file_id, references)| {
let edition = file_id.edition(sema.db);
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
source_edit_from_references(sema.db, references, def, &new_name, edition),
)
}));
// This needs to come after the references edits, because we change the annotation of existing edits
// if a conflict is detected.
let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?;
source_change.insert_source_edit(file_id, edit);
if rename_definition == RenameDefinition::Yes {
// This needs to come after the references edits, because we change the annotation of existing edits
// if a conflict is detected.
let (file_id, edit) = source_edit_from_def(sema, def, &new_name, &mut source_change)?;
source_change.insert_source_edit(file_id, edit);
}
Ok(source_change)
}
pub fn source_edit_from_references(
db: &RootDatabase,
references: &[FileReference],
def: Definition,
new_name: &str,
new_name: &Name,
edition: Edition,
) -> TextEdit {
let new_name = if is_raw_identifier(new_name, edition) {
format!("r#{new_name}")
} else {
new_name.to_owned()
};
let name_display = new_name.display(db, edition);
let mut edit = TextEdit::builder();
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new();
@ -395,23 +421,15 @@ pub fn source_edit_from_references(
// to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input
FileReferenceNode::NameRef(name_ref) if name_range == range => {
source_edit_from_name_ref(&mut edit, name_ref, &new_name, def)
source_edit_from_name_ref(&mut edit, name_ref, &name_display, def)
}
FileReferenceNode::Name(name) if name_range == range => {
source_edit_from_name(&mut edit, name, &new_name)
source_edit_from_name(&mut edit, name, &name_display)
}
_ => false,
};
if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
let (range, new_name) = match name {
FileReferenceNode::Lifetime(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(&new_name).to_owned(),
),
_ => (range, new_name.to_owned()),
};
edit.replace(range, new_name);
edit.replace(range, name_display.to_string());
edited_ranges.push(range.start());
}
}
@ -419,7 +437,11 @@ pub fn source_edit_from_references(
edit.finish()
}
fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: &str) -> bool {
fn source_edit_from_name(
edit: &mut TextEditBuilder,
name: &ast::Name,
new_name: &dyn Display,
) -> bool {
if ast::RecordPatField::for_field_name(name).is_some() {
if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
cov_mark::hit!(rename_record_pat_field_name_split);
@ -439,7 +461,7 @@ fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name:
fn source_edit_from_name_ref(
edit: &mut TextEditBuilder,
name_ref: &ast::NameRef,
new_name: &str,
new_name: &dyn Display,
def: Definition,
) -> bool {
if name_ref.super_token().is_some() {
@ -452,6 +474,7 @@ fn source_edit_from_name_ref(
match &(rcf_name_ref, rcf_expr.and_then(|it| expr_as_name_ref(&it))) {
// field: init-expr, check if we can use a field init shorthand
(Some(field_name), Some(init)) => {
let new_name = new_name.to_string();
if field_name == name_ref {
if init.text() == new_name {
cov_mark::hit!(test_rename_field_put_init_shorthand);
@ -507,6 +530,7 @@ fn source_edit_from_name_ref(
{
// field name is being renamed
if let Some(name) = pat.name() {
let new_name = new_name.to_string();
if name.text() == new_name {
cov_mark::hit!(test_rename_field_put_init_shorthand_pat);
// Foo { field: ref mut local } -> Foo { ref mut field }
@ -518,7 +542,7 @@ fn source_edit_from_name_ref(
let s = field_name.syntax().text_range().start();
let e = pat.syntax().text_range().start();
edit.delete(TextRange::new(s, e));
edit.replace(name.syntax().text_range(), new_name.to_owned());
edit.replace(name.syntax().text_range(), new_name);
return true;
}
}
@ -532,16 +556,9 @@ fn source_edit_from_name_ref(
fn source_edit_from_def(
sema: &Semantics<'_, RootDatabase>,
def: Definition,
new_name: &str,
new_name: &Name,
source_change: &mut SourceChange,
) -> Result<(FileId, TextEdit)> {
let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| {
if is_raw_identifier(new_name, file_id.edition(sema.db)) {
format!("r#{new_name}")
} else {
new_name.to_owned()
}
};
let mut edit = TextEdit::builder();
if let Definition::Local(local) = def {
let mut file_id = None;
@ -573,7 +590,10 @@ fn source_edit_from_def(
{
Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2);
edit.replace(range, new_name_edition_aware(new_name, file_id2));
edit.replace(
range,
new_name.display(sema.db, file_id2.edition(sema.db)).to_string(),
);
continue;
}
None => {
@ -587,7 +607,7 @@ fn source_edit_from_def(
// special cases required for renaming fields/locals in Record patterns
if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
if let Some(name_ref) = pat_field.name_ref() {
if new_name == name_ref.text().as_str().trim_start_matches("r#")
if new_name.as_str() == name_ref.text().as_str().trim_start_matches("r#")
&& pat.at_token().is_none()
{
// Foo { field: ref mut local } -> Foo { ref mut field }
@ -607,7 +627,9 @@ fn source_edit_from_def(
// ^^^^^ replace this with `new_name`
edit.replace(
name_range,
new_name_edition_aware(new_name, source.file_id),
new_name
.display(sema.db, source.file_id.edition(sema.db))
.to_string(),
);
}
} else {
@ -618,10 +640,16 @@ fn source_edit_from_def(
pat.syntax().text_range().start(),
format!("{}: ", pat_field.field_name().unwrap()),
);
edit.replace(name_range, new_name_edition_aware(new_name, source.file_id));
edit.replace(
name_range,
new_name.display(sema.db, source.file_id.edition(sema.db)).to_string(),
);
}
} else {
edit.replace(name_range, new_name_edition_aware(new_name, source.file_id));
edit.replace(
name_range,
new_name.display(sema.db, source.file_id.edition(sema.db)).to_string(),
);
}
}
}
@ -639,16 +667,13 @@ fn source_edit_from_def(
.range_for_rename(sema)
.ok_or_else(|| format_err!("No identifier available to rename"))?;
let (range, new_name) = match def {
Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => (
TextRange::empty(range.end()),
format!(" as {}", new_name.display(sema.db, file_id.edition(sema.db)),),
),
Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => {
(TextRange::empty(range.end()), format!(" as {new_name}"))
}
_ => (range, new_name.to_owned()),
_ => (range, new_name.display(sema.db, file_id.edition(sema.db)).to_string()),
};
edit.replace(range, new_name_edition_aware(&new_name, file_id));
edit.replace(range, new_name);
Ok((file_id.file_id(sema.db), edit.finish()))
}
@ -657,26 +682,27 @@ pub enum IdentifierKind {
Ident,
Lifetime,
Underscore,
LowercaseSelf,
}
impl IdentifierKind {
pub fn classify(new_name: &str) -> Result<IdentifierKind> {
let new_name = new_name.trim_start_matches("r#");
match parser::LexedStr::single_token(Edition::LATEST, new_name) {
pub fn classify(edition: Edition, new_name: &str) -> Result<(Name, IdentifierKind)> {
match parser::LexedStr::single_token(edition, new_name) {
Some(res) => match res {
(SyntaxKind::IDENT, _) => {
if let Some(inner) = new_name.strip_prefix("r#") {
if matches!(inner, "self" | "crate" | "super" | "Self") {
bail!("Invalid name: `{}` cannot be a raw identifier", inner);
}
}
Ok(IdentifierKind::Ident)
(SyntaxKind::IDENT, _) => Ok((Name::new_root(new_name), IdentifierKind::Ident)),
(T![_], _) => {
Ok((Name::new_symbol_root(sym::underscore), IdentifierKind::Underscore))
}
(T![_], _) => Ok(IdentifierKind::Underscore),
(SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => {
Ok(IdentifierKind::Lifetime)
Ok((Name::new_lifetime(new_name), IdentifierKind::Lifetime))
}
_ if is_raw_identifier(new_name, Edition::LATEST) => Ok(IdentifierKind::Ident),
_ if SyntaxKind::from_keyword(new_name, edition).is_some() => match new_name {
"self" => Ok((Name::new_root(new_name), IdentifierKind::LowercaseSelf)),
"crate" | "super" | "Self" => {
bail!("Invalid name `{}`: cannot rename to a keyword", new_name)
}
_ => Ok((Name::new_root(new_name), IdentifierKind::Ident)),
},
(_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
(_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
},
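
A summary sketch of the classification above, written against the match arms (pseudo-notation, not real constructors):

// classify(edition, "foo")     -> Ok((Name("foo"),  IdentifierKind::Ident))
// classify(edition, "_")       -> Ok((Name("_"),    IdentifierKind::Underscore))
// classify(edition, "'a")      -> Ok((Name("'a"),   IdentifierKind::Lifetime))
// classify(edition, "self")    -> Ok((Name("self"), IdentifierKind::LowercaseSelf))
// classify(edition, "super")   -> Err("cannot rename to a keyword")
// classify(edition, "'static") -> Err("not an identifier") (rejected by the lifetime guard)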

View file

@ -1,5 +1,5 @@
use hir::{CaseType, InFile, db::ExpandDatabase};
use ide_db::{assists::Assist, defs::NameClass};
use ide_db::{assists::Assist, defs::NameClass, rename::RenameDefinition};
use syntax::AstNode;
use crate::{
@ -44,7 +44,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option<Vec<Ass
let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range);
if ctx.resolve.should_resolve(&res.id) {
let source_change = def.rename(&ctx.sema, &d.suggested_text);
let source_change = def.rename(&ctx.sema, &d.suggested_text, RenameDefinition::Yes);
res.source_change = Some(source_change.ok().unwrap_or_default());
}

View file

@ -135,6 +135,7 @@ pub(crate) fn json_in_items(
"JSON syntax is not valid as a Rust item",
FileRange { file_id: vfs_file_id, range },
)
.stable()
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(vfs_file_id);
let scope = scb.make_import_scope_mut(import_scope);

View file

@ -1,4 +1,5 @@
use either::Either;
use hir::{Field, HasCrate};
use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
@ -13,44 +14,69 @@ use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
//
// This diagnostic is triggered if created structure does not have field provided in record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
let node = d.field.map(Into::into);
if d.private {
// FIXME: quickfix to add required visibility
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0451"),
"field is private",
node,
)
.stable()
let (code, message) = if d.private.is_some() {
("E0451", "field is private")
} else if let VariantId::EnumVariantId(_) = d.variant {
("E0559", "no such field")
} else {
Diagnostic::new_with_syntax_node_ptr(
ctx,
match d.variant {
VariantId::EnumVariantId(_) => DiagnosticCode::RustcHardError("E0559"),
_ => DiagnosticCode::RustcHardError("E0560"),
},
"no such field",
node,
)
("E0560", "no such field")
};
let node = d.field.map(Into::into);
Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError(code), message, node)
.stable()
.with_fixes(fixes(ctx, d))
}
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
// FIXME: quickfix for pattern
let root = ctx.sema.db.parse_or_expand(d.field.file_id);
match &d.field.value.to_node(&root) {
Either::Left(node) => missing_record_expr_field_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
node,
),
Either::Left(node) => {
if let Some(private_field) = d.private {
field_is_private_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
node,
private_field,
)
} else {
missing_record_expr_field_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
node,
)
}
}
_ => None,
}
}
fn field_is_private_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: EditionedFileId,
record_expr_field: &ast::RecordExprField,
private_field: Field,
) -> Option<Vec<Assist>> {
let def_crate = private_field.krate(sema.db);
let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
let visibility = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
let source = private_field.source(sema.db)?;
let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
let source_change = SourceChange::from_text_edit(
range.file_id.file_id(sema.db),
TextEdit::insert(range.range.start(), visibility.into()),
);
Some(vec![fix(
"increase_field_visibility",
"Increase field visibility",
source_change,
sema.original_range(record_expr_field.syntax()).range,
)])
}
fn missing_record_expr_field_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: EditionedFileId,
@ -118,7 +144,7 @@ fn missing_record_expr_field_fixes(
"create_field",
"Create field",
source_change,
record_expr_field.syntax().text_range(),
sema.original_range(record_expr_field.syntax()).range,
)]);
fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
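
In short, `field_is_private_fixes` above inserts the weakest visibility that makes the usage compile:

// usage crate == defining crate -> prepend `pub(crate) ` to the field
// usage crate != defining crate -> prepend `pub ` to the field

The two tests below, `test_struct_field_private_same_crate_fix` and `test_struct_field_private_other_crate_fix`, cover one case each.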
@ -387,21 +413,92 @@ fn f(s@m::Struct {
// assignee expression
m::Struct {
field: 0,
//^^^^^^^^ error: field is private
//^^^^^^^^ 💡 error: field is private
field2
//^^^^^^ error: field is private
//^^^^^^ 💡 error: field is private
} = s;
m::Struct {
field: 0,
//^^^^^^^^ error: field is private
//^^^^^^^^ 💡 error: field is private
field2
//^^^^^^ error: field is private
//^^^^^^ 💡 error: field is private
};
}
"#,
)
}
#[test]
fn test_struct_field_private_same_crate_fix() {
check_diagnostics(
r#"
mod m {
pub struct Struct {
field: u32,
}
}
fn f() {
let _ = m::Struct {
field: 0,
//^^^^^^^^ 💡 error: field is private
};
}
"#,
);
check_fix(
r#"
mod m {
pub struct Struct {
field: u32,
}
}
fn f() {
let _ = m::Struct {
field$0: 0,
};
}
"#,
r#"
mod m {
pub struct Struct {
pub(crate) field: u32,
}
}
fn f() {
let _ = m::Struct {
field: 0,
};
}
"#,
);
}
#[test]
fn test_struct_field_private_other_crate_fix() {
check_fix(
r#"
//- /lib.rs crate:another_crate
pub struct Struct {
field: u32,
}
//- /lib.rs crate:this_crate deps:another_crate
use another_crate;
fn f() {
let _ = another_crate::Struct {
field$0: 0,
};
}
"#,
r#"
pub struct Struct {
pub field: u32,
}
"#,
);
}
#[test]
fn editions_between_macros() {
check_diagnostics(

View file

@ -69,6 +69,7 @@ pub(crate) fn unlinked_file(
FileRange { file_id, range },
)
.with_unused(unused)
.stable()
.with_fixes(fixes),
);
}

View file

@ -800,4 +800,65 @@ foo();
foo();"#]],
);
}
#[test]
fn works_in_sig() {
check(
r#"
macro_rules! foo {
() => { u32 };
}
fn foo() -> foo$0!() {
42
}
"#,
expect![[r#"
foo!
u32"#]],
);
check(
r#"
macro_rules! foo {
() => { u32 };
}
fn foo(_: foo$0!() ) {}
"#,
expect![[r#"
foo!
u32"#]],
);
}
#[test]
fn works_in_generics() {
check(
r#"
trait Trait {}
macro_rules! foo {
() => { Trait };
}
impl<const C: foo$0!()> Trait for () {}
"#,
expect![[r#"
foo!
Trait"#]],
);
}
#[test]
fn works_in_fields() {
check(
r#"
macro_rules! foo {
() => { u32 };
}
struct S {
field: foo$0!(),
}
"#,
expect![[r#"
foo!
u32"#]],
);
}
}

View file

@ -6,7 +6,7 @@ use std::{
use either::Either;
use hir::{
ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
HirWrite, ModuleDef, ModuleDefId, Semantics, sym,
HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder};
use ide_db::{FxHashSet, text_edit::TextEdit};
@ -34,6 +34,7 @@ mod extern_block;
mod generic_param;
mod implicit_drop;
mod implicit_static;
mod implied_dyn_trait;
mod lifetime;
mod param_name;
mod range_exclusive;
@ -95,16 +96,16 @@ pub(crate) fn inlay_hints(
return acc;
};
let famous_defs = FamousDefs(&sema, scope.krate());
let display_target = famous_defs.1.to_display_target(sema.db);
let ctx = &mut InlayHintCtx::default();
let mut hints = |event| {
if let Some(node) = handle_event(ctx, event) {
hints(&mut acc, ctx, &famous_defs, config, file_id, node);
hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node);
}
};
let mut preorder = file.preorder();
while let Some(event) = preorder.next() {
// FIXME: This can miss hints whose computation requires the parent of the in-range node.
if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none())
{
preorder.skip_subtree();
@ -144,10 +145,12 @@ pub(crate) fn inlay_hints_resolve(
let famous_defs = FamousDefs(&sema, scope.krate());
let mut acc = Vec::new();
let display_target = famous_defs.1.to_display_target(sema.db);
let ctx = &mut InlayHintCtx::default();
let mut hints = |event| {
if let Some(node) = handle_event(ctx, event) {
hints(&mut acc, ctx, &famous_defs, config, file_id, node);
hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node);
}
};
@ -202,17 +205,19 @@ fn handle_event(ctx: &mut InlayHintCtx, node: WalkEvent<SyntaxNode>) -> Option<S
fn hints(
hints: &mut Vec<InlayHint>,
ctx: &mut InlayHintCtx,
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
famous_defs @ FamousDefs(sema, _krate): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
display_target: DisplayTarget,
node: SyntaxNode,
) {
let file_id = file_id.editioned_file_id(sema.db);
let Some(krate) = sema.first_crate(file_id.file_id()) else {
return;
};
let display_target = krate.to_display_target(sema.db);
closing_brace::hints(hints, sema, config, file_id, display_target, node.clone());
closing_brace::hints(
hints,
sema,
config,
display_target,
InRealFile { file_id, value: node.clone() },
);
if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) {
generic_param::hints(hints, famous_defs, config, any_has_generic_args);
}
@ -231,18 +236,18 @@ fn hints(
closure_captures::hints(hints, famous_defs, config, it.clone());
closure_ret::hints(hints, famous_defs, config, display_target, it)
},
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id, it),
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it),
_ => Some(()),
}
},
ast::Pat(it) => {
binding_mode::hints(hints, famous_defs, config, file_id, &it);
binding_mode::hints(hints, famous_defs, config, &it);
match it {
ast::Pat::IdentPat(it) => {
bind_pat::hints(hints, famous_defs, config, display_target, &it);
}
ast::Pat::RangePat(it) => {
range_exclusive::hints(hints, famous_defs, config, file_id, it);
range_exclusive::hints(hints, famous_defs, config, it);
}
_ => {}
}
@ -250,30 +255,38 @@ fn hints(
},
ast::Item(it) => match it {
ast::Item::Fn(it) => {
implicit_drop::hints(hints, famous_defs, config, file_id, &it);
implicit_drop::hints(hints, famous_defs, config, display_target, &it);
if let Some(extern_block) = &ctx.extern_block_parent {
extern_block::fn_hints(hints, famous_defs, config, file_id, &it, extern_block);
extern_block::fn_hints(hints, famous_defs, config, &it, extern_block);
}
lifetime::fn_hints(hints, ctx, famous_defs, config, file_id, it)
lifetime::fn_hints(hints, ctx, famous_defs, config, it)
},
ast::Item::Static(it) => {
if let Some(extern_block) = &ctx.extern_block_parent {
extern_block::static_hints(hints, famous_defs, config, file_id, &it, extern_block);
extern_block::static_hints(hints, famous_defs, config, &it, extern_block);
}
implicit_static::hints(hints, famous_defs, config, file_id, Either::Left(it))
implicit_static::hints(hints, famous_defs, config, Either::Left(it))
},
ast::Item::Const(it) => implicit_static::hints(hints, famous_defs, config, file_id, Either::Right(it)),
ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, file_id, it),
ast::Item::ExternBlock(it) => extern_block::extern_block_hints(hints, famous_defs, config, file_id, it),
ast::Item::Const(it) => implicit_static::hints(hints, famous_defs, config, Either::Right(it)),
ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, it),
ast::Item::ExternBlock(it) => extern_block::extern_block_hints(hints, famous_defs, config, it),
_ => None,
},
// FIXME: trait object type elisions
ast::Type(ty) => match ty {
ast::Type::FnPtrType(ptr) => lifetime::fn_ptr_hints(hints, ctx, famous_defs, config, file_id, ptr),
ast::Type::PathType(path) => lifetime::fn_path_hints(hints, ctx, famous_defs, config, file_id, path),
ast::Type::FnPtrType(ptr) => lifetime::fn_ptr_hints(hints, ctx, famous_defs, config, ptr),
ast::Type::PathType(path) => {
lifetime::fn_path_hints(hints, ctx, famous_defs, config, &path);
implied_dyn_trait::hints(hints, famous_defs, config, Either::Left(path));
Some(())
},
ast::Type::DynTraitType(dyn_) => {
implied_dyn_trait::hints(hints, famous_defs, config, Either::Right(dyn_));
Some(())
},
_ => Some(()),
},
ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, file_id, it),
ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, it),
_ => Some(()),
}
};
@ -438,6 +451,7 @@ pub enum InlayKind {
Parameter,
GenericParameter,
Type,
Dyn,
Drop,
RangeExclusive,
ExternUnsafety,
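
The new `Dyn` variant backs the implied-`dyn` hints added by the new `implied_dyn_trait` module later in this diff; a minimal sketch of the rendering (editor display is approximate):

trait Trait {}
fn f(_: &Trait) {}
//       ^ hint label `dyn` is shown here; its text edit rewrites the type to `&dyn Trait`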

View file

@ -8,7 +8,6 @@ use hir::Mutability;
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEditBuilder;
use span::EditionedFileId;
use syntax::ast::{self, AstNode};
use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
@ -17,7 +16,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
pat: &ast::Pat,
) -> Option<()> {
if !config.binding_mode_hints {

View file

@ -3,7 +3,6 @@
//! Currently this renders the implied `Sized` bound.
use ide_db::{FileRange, famous_defs::FamousDefs};
use span::EditionedFileId;
use syntax::ast::{self, AstNode, HasTypeBounds};
use crate::{
@ -15,7 +14,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
params: ast::GenericParamList,
) -> Option<()> {
if !config.sized_bound {

View file

@ -3,9 +3,8 @@
//! fn g() {
//! } /* fn g */
//! ```
use hir::{DisplayTarget, HirDisplay, Semantics};
use hir::{DisplayTarget, HirDisplay, InRealFile, Semantics};
use ide_db::{FileRange, RootDatabase};
use span::EditionedFileId;
use syntax::{
SyntaxKind, SyntaxNode, T,
ast::{self, AstNode, HasLoopBody, HasName},
@ -21,15 +20,14 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
display_target: DisplayTarget,
original_node: SyntaxNode,
InRealFile { file_id, value: node }: InRealFile<SyntaxNode>,
) -> Option<()> {
let min_lines = config.closing_brace_hints_min_lines?;
let name = |it: ast::Name| it.syntax().text_range();
let mut node = original_node.clone();
let mut node = node.clone();
let mut closing_token;
let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
closing_token = item_list.r_curly_token()?;
@ -44,7 +42,7 @@ pub(super) fn hints(
let hint_text = match trait_ {
Some(tr) => format!(
"impl {} for {}",
tr.name(sema.db).display(sema.db, file_id.edition()),
tr.name(sema.db).display(sema.db, display_target.edition),
ty.display_truncated(sema.db, config.max_length, display_target),
),
None => format!("impl {}", ty.display_truncated(sema.db, config.max_length, display_target)),
@ -142,7 +140,8 @@ pub(super) fn hints(
return None;
}
let linked_location = name_range.map(|range| FileRange { file_id: file_id.into(), range });
let linked_location =
name_range.map(|range| FileRange { file_id: file_id.file_id(sema.db), range });
acc.push(InlayHint {
range: closing_token.text_range(),
kind: InlayKind::ClosingBrace,
@ -151,7 +150,7 @@ pub(super) fn hints(
position: InlayHintPosition::After,
pad_left: true,
pad_right: false,
resolve_parent: Some(original_node.text_range()),
resolve_parent: Some(node.text_range()),
});
None

View file

@ -7,7 +7,6 @@
use hir::Semantics;
use ide_db::text_edit::TextEdit;
use ide_db::{RootDatabase, famous_defs::FamousDefs};
use span::EditionedFileId;
use syntax::ast::{self, AstNode, HasName};
use crate::{
@ -19,7 +18,6 @@ pub(super) fn enum_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_: EditionedFileId,
enum_: ast::Enum,
) -> Option<()> {
if let DiscriminantHints::Never = config.discriminant_hints {

View file

@ -1,6 +1,5 @@
//! Extern block hints
use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit};
use span::EditionedFileId;
use syntax::{AstNode, SyntaxToken, ast};
use crate::{InlayHint, InlayHintsConfig};
@ -9,7 +8,6 @@ pub(super) fn extern_block_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
extern_block: ast::ExternBlock,
) -> Option<()> {
if extern_block.unsafe_token().is_some() {
@ -36,7 +34,6 @@ pub(super) fn fn_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
fn_: &ast::Fn,
extern_block: &ast::ExternBlock,
) -> Option<()> {
@ -55,7 +52,6 @@ pub(super) fn static_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
static_: &ast::Static,
extern_block: &ast::ExternBlock,
) -> Option<()> {

View file

@ -12,7 +12,6 @@ use hir::{
};
use ide_db::{FileRange, famous_defs::FamousDefs};
use span::EditionedFileId;
use syntax::{
ToSmolStr,
ast::{self, AstNode},
@ -25,7 +24,7 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
display_target: hir::DisplayTarget,
node: &ast::Fn,
) -> Option<()> {
if !config.implicit_drop_hints {
@ -94,7 +93,7 @@ pub(super) fn hints(
MirSpan::Unknown => continue,
};
let binding = &hir.bindings[binding_idx];
let name = binding.name.display_no_db(file_id.edition()).to_smolstr();
let name = binding.name.display_no_db(display_target.edition).to_smolstr();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables
}

View file

@ -5,7 +5,6 @@
use either::Either;
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEdit;
use span::EditionedFileId;
use syntax::{
SyntaxKind,
ast::{self, AstNode},
@ -17,7 +16,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
statik_or_const: Either<ast::Static, ast::Const>,
) -> Option<()> {
if config.lifetime_elision_hints != LifetimeElisionHints::Always {

View file

@ -0,0 +1,133 @@
//! Implementation of implied `dyn` trait hints.
//!
//! Currently this renders the implied `dyn` keyword before bare trait object types.
use either::Either;
use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit};
use syntax::ast::{self, AstNode};
use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
path: Either<ast::PathType, ast::DynTraitType>,
) -> Option<()> {
let parent = path.syntax().parent()?;
let range = match path {
Either::Left(path) => {
let paren =
parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last();
let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent);
if ast::TypeBound::can_cast(parent.kind())
|| ast::TypeAnchor::can_cast(parent.kind())
|| ast::Impl::cast(parent)
.and_then(|it| it.trait_())
.is_some_and(|it| it.syntax() == path.syntax())
{
return None;
}
sema.resolve_trait(&path.path()?)?;
paren.map_or_else(|| path.syntax().text_range(), |it| it.text_range())
}
Either::Right(dyn_) => {
if dyn_.dyn_token().is_some() {
return None;
}
dyn_.syntax().text_range()
}
};
acc.push(InlayHint {
range,
kind: InlayKind::Dyn,
label: InlayHintLabel::simple("dyn", None, None),
text_edit: Some(
config.lazy_text_edit(|| TextEdit::insert(range.start(), "dyn ".to_owned())),
),
position: InlayHintPosition::Before,
pad_left: false,
pad_right: true,
resolve_parent: Some(range),
});
Some(())
}
#[cfg(test)]
mod tests {
use expect_test::expect;
use crate::inlay_hints::InlayHintsConfig;
use crate::inlay_hints::tests::{DISABLED_CONFIG, check_edit, check_with_config};
#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_with_config(InlayHintsConfig { sized_bound: true, ..DISABLED_CONFIG }, ra_fixture);
}
#[test]
fn path_works() {
check(
r#"
struct S {}
trait T {}
fn foo(_: T, _: dyn T, _: S) {}
// ^ dyn
fn foo(_: &T, _: for<'a> T) {}
// ^ dyn
// ^ dyn
impl T {}
// ^ dyn
impl T for (T) {}
// ^^^ dyn
"#,
);
}
#[test]
fn missing_dyn_bounds() {
check(
r#"
trait T {}
fn foo(
_: T + T,
// ^^^^^ dyn
_: T + 'a,
// ^^^^^^ dyn
_: 'a + T,
// ^^^^^^ dyn
_: &(T + T)
// ^^^^^ dyn
_: &mut (T + T)
// ^^^^^ dyn
_: *mut (T),
// ^^^ dyn
) {}
"#,
);
}
#[test]
fn edit() {
check_edit(
DISABLED_CONFIG,
r#"
trait T {}
fn foo(
_: &mut T
) {}
"#,
expect![[r#"
trait T {}
fn foo(
_: &mut dyn T
) {}
"#]],
);
}
}
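
For reference, a sketch of which bare-trait positions the early-outs in `hints` exclude (my reading of the checks above):

trait Trait {}
fn object(_: &Trait) {}       // bare trait object type  -> hinted as `&dyn Trait`
fn generic<T: Trait>(_: T) {} // `Trait` is a type bound -> no hint
impl Trait for () {}          // trait position of an impl -> no hint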

View file

@ -6,7 +6,6 @@ use std::iter;
use ide_db::{FxHashMap, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty};
use itertools::Itertools;
use span::EditionedFileId;
use syntax::{SmolStr, format_smolstr};
use syntax::{
SyntaxKind, SyntaxToken,
@ -23,7 +22,6 @@ pub(super) fn fn_hints(
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
func: ast::Fn,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
@ -40,7 +38,6 @@ pub(super) fn fn_hints(
ctx,
fd,
config,
file_id,
param_list.params().filter_map(|it| {
Some((
it.pat().and_then(|it| match it {
@ -74,7 +71,6 @@ pub(super) fn fn_ptr_hints(
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
func: ast::FnPtrType,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
@ -97,7 +93,6 @@ pub(super) fn fn_ptr_hints(
ctx,
fd,
config,
file_id,
param_list.params().filter_map(|it| {
Some((
it.pat().and_then(|it| match it {
@ -140,8 +135,7 @@ pub(super) fn fn_path_hints(
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
func: ast::PathType,
func: &ast::PathType,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
return None;
@ -163,7 +157,6 @@ pub(super) fn fn_path_hints(
ctx,
fd,
config,
file_id,
param_list.type_args().filter_map(|it| Some((None, it.ty()?))),
generic_param_list,
ret_type,
@ -202,7 +195,6 @@ fn hints_(
ctx: &mut InlayHintCtx,
FamousDefs(_, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
params: impl Iterator<Item = (Option<ast::Name>, ast::Type)>,
generic_param_list: Option<ast::GenericParamList>,
ret_type: Option<ast::RetType>,

View file

@ -4,7 +4,6 @@
//! if let ../* < */100 = 50 {}
//! ```
use ide_db::famous_defs::FamousDefs;
use span::EditionedFileId;
use syntax::{SyntaxToken, T, ast};
use crate::{InlayHint, InlayHintsConfig};
@ -13,7 +12,6 @@ pub(super) fn hints(
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
range: impl ast::RangeItem,
) -> Option<()> {
(config.range_exclusive_hints && range.end().is_some())

View file

@ -4,11 +4,11 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
use hir::{AsAssocItem, InFile, Semantics};
use hir::{AsAssocItem, InFile, Name, Semantics, sym};
use ide_db::{
FileId, FileRange, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
rename::{IdentifierKind, bail, format_err, source_edit_from_references},
rename::{IdentifierKind, RenameDefinition, bail, format_err, source_edit_from_references},
source_change::SourceChangeBuilder,
};
use itertools::Itertools;
@ -33,8 +33,8 @@ pub(crate) fn prepare_rename(
let source_file = sema.parse_guess_edition(position.file_id);
let syntax = source_file.syntax();
let res = find_definitions(&sema, syntax, position)?
.map(|(frange, kind, def)| {
let res = find_definitions(&sema, syntax, position, &Name::new_symbol_root(sym::underscore))?
.map(|(frange, kind, def, _, _)| {
// ensure all ranges are valid
if def.range_for_rename(&sema).is_none() {
@ -88,22 +88,28 @@ pub(crate) fn rename(
let source_file = sema.parse(file_id);
let syntax = source_file.syntax();
let defs = find_definitions(&sema, syntax, position)?;
let alias_fallback = alias_fallback(syntax, position, new_name);
let edition = file_id.edition(db);
let (new_name, kind) = IdentifierKind::classify(edition, new_name)?;
let defs = find_definitions(&sema, syntax, position, &new_name)?;
let alias_fallback =
alias_fallback(syntax, position, &new_name.display(db, edition).to_string());
let ops: RenameResult<Vec<SourceChange>> = match alias_fallback {
Some(_) => defs
// FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
// properly find "direct" usages/references.
.map(|(.., def)| {
match IdentifierKind::classify(new_name)? {
.map(|(.., def, new_name, _)| {
match kind {
IdentifierKind::Ident => (),
IdentifierKind::Lifetime => {
bail!("Cannot alias reference to a lifetime identifier")
}
IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"),
IdentifierKind::LowercaseSelf => {
bail!("Cannot rename alias reference to `self`")
}
};
let mut usages = def.usages(&sema).all();
// FIXME: hack - removes the usage that triggered this rename operation.
@ -120,7 +126,7 @@ pub(crate) fn rename(
source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| {
(
position.file_id,
source_edit_from_references(refs, def, new_name, file_id.edition(db)),
source_edit_from_references(db, refs, def, &new_name, edition),
)
}));
@ -128,18 +134,18 @@ pub(crate) fn rename(
})
.collect(),
None => defs
.map(|(.., def)| {
.map(|(.., def, new_name, rename_def)| {
if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param);
return rename_self_to_param(&sema, local, self_param, new_name);
return rename_self_to_param(&sema, local, self_param, &new_name, kind);
}
if new_name == "self" {
if kind == IdentifierKind::LowercaseSelf {
cov_mark::hit!(rename_to_self);
return rename_to_self(&sema, local);
}
}
def.rename(&sema, new_name)
def.rename(&sema, new_name.as_str(), rename_def)
})
.collect(),
};
@ -159,7 +165,7 @@ pub(crate) fn will_rename_file(
let sema = Semantics::new(db);
let module = sema.file_to_module_def(file_id)?;
let def = Definition::Module(module);
let mut change = def.rename(&sema, new_name_stem).ok()?;
let mut change = def.rename(&sema, new_name_stem, RenameDefinition::Yes).ok()?;
change.file_system_edits.clear();
Some(change)
}
@ -200,22 +206,40 @@ fn find_definitions(
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
FilePosition { file_id, offset }: FilePosition,
) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
new_name: &Name,
) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition, Name, RenameDefinition)>>
{
let maybe_format_args =
syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
if let Some((range, _, _, Some(resolution))) =
token.and_then(|token| sema.check_for_format_args_template(token, offset))
maybe_format_args.and_then(|token| sema.check_for_format_args_template(token, offset))
{
return Ok(vec![(
FileRange { file_id, range },
SyntaxKind::STRING,
Definition::from(resolution),
new_name.clone(),
RenameDefinition::Yes,
)]
.into_iter());
}
let original_ident = syntax
.token_at_offset(offset)
.max_by_key(|t| {
t.kind().is_any_identifier() || matches!(t.kind(), SyntaxKind::LIFETIME_IDENT)
})
.map(|t| {
if t.kind() == SyntaxKind::LIFETIME_IDENT {
Name::new_lifetime(t.text())
} else {
Name::new_root(t.text())
}
})
.ok_or_else(|| format_err!("No references found at position"))?;
let symbols =
sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
sema.find_namelike_at_offset_with_descend(syntax, offset).map(|name_like| {
let kind = name_like.syntax().kind();
let range = sema
.original_range_opt(name_like.syntax())
@ -284,23 +308,28 @@ fn find_definitions(
.ok_or_else(|| format_err!("No references found at position"))
}
};
res.map(|def| (range, kind, def))
res.map(|def| {
let n = def.name(sema.db)?;
if n == original_ident {
Some((range, kind, def, new_name.clone(), RenameDefinition::Yes))
} else if let Some(suffix) = n.as_str().strip_prefix(original_ident.as_str()) {
Some((range, kind, def, Name::new_root(&format!("{}{suffix}", new_name.as_str())), RenameDefinition::No))
} else {
n.as_str().strip_suffix(original_ident.as_str().trim_start_matches('\''))
.map(|prefix| (range, kind, def, Name::new_root(&format!("{prefix}{}", new_name.as_str())), RenameDefinition::No))
}
})
});
let res: RenameResult<Vec<_>> = symbols.collect();
let res: RenameResult<Vec<_>> = symbols.filter_map(Result::transpose).collect();
match res {
Ok(v) => {
if v.is_empty() {
// FIXME: some semantic duplication between "empty vec" and "Err()"
Err(format_err!("No references found at position"))
} else {
// remove duplicates, comparing `Definition`s
Ok(v.into_iter()
.unique_by(|&(.., def)| def)
.map(|(a, b, c)| (a.into_file_id(sema.db), b, c))
.collect::<Vec<_>>()
.into_iter())
}
// remove duplicates, comparing `Definition`s
Ok(v.into_iter()
.unique_by(|&(.., def, _, _)| def)
.map(|(a, b, c, d, e)| (a.into_file_id(sema.db), b, c, d, e))
.collect::<Vec<_>>()
.into_iter())
}
Err(e) => Err(e),
}
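
The prefix/suffix matching above is what lets a rename reach macro-generated items; a sketch, assuming the cursor sits on `Foo` and the new name is `Bar`:

// def named `Foo`       -> renamed to `Bar`          (RenameDefinition::Yes)
// def named `FooSuffix` -> usages become `BarSuffix` (RenameDefinition::No:
//                          only references are edited; the macro re-derives the def)
// def named `PrefixFoo` -> usages become `PrefixBar` (RenameDefinition::No)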
@ -370,7 +399,13 @@ fn rename_to_self(
source_change.extend(usages.iter().map(|(file_id, references)| {
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, "self", file_id.edition(sema.db)),
source_edit_from_references(
sema.db,
references,
def,
&Name::new_symbol_root(sym::self_),
file_id.edition(sema.db),
),
)
}));
source_change.insert_source_edit(
@ -384,23 +419,25 @@ fn rename_self_to_param(
sema: &Semantics<'_, RootDatabase>,
local: hir::Local,
self_param: hir::SelfParam,
new_name: &str,
new_name: &Name,
identifier_kind: IdentifierKind,
) -> RenameResult<SourceChange> {
if new_name == "self" {
if identifier_kind == IdentifierKind::LowercaseSelf {
// Let's do nothing rather than complain.
cov_mark::hit!(rename_self_to_self);
return Ok(SourceChange::default());
}
let identifier_kind = IdentifierKind::classify(new_name)?;
let InFile { file_id, value: self_param } =
sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;
let def = Definition::Local(local);
let usages = def.usages(sema).all();
let edit = text_edit_from_self_param(&self_param, new_name)
.ok_or_else(|| format_err!("No target type found"))?;
let edit = text_edit_from_self_param(
&self_param,
new_name.display(sema.db, file_id.edition(sema.db)).to_string(),
)
.ok_or_else(|| format_err!("No target type found"))?;
if usages.len() > 1 && identifier_kind == IdentifierKind::Underscore {
bail!("Cannot rename reference to `_` as it is being referenced multiple times");
}
@ -409,13 +446,19 @@ fn rename_self_to_param(
source_change.extend(usages.iter().map(|(file_id, references)| {
(
file_id.file_id(sema.db),
source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
source_edit_from_references(
sema.db,
references,
def,
new_name,
file_id.edition(sema.db),
),
)
}));
Ok(source_change)
}
fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> {
fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
return Some(p.path()?.segment()?.name_ref()?.text().to_string());
@ -427,7 +470,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt
Some(impl_def) => {
let type_name = target_type_name(&impl_def)?;
let mut replacement_text = String::from(new_name);
let mut replacement_text = new_name;
replacement_text.push_str(": ");
match (self_param.amp_token(), self_param.mut_token()) {
(Some(_), None) => replacement_text.push('&'),
@ -440,7 +483,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt
}
None => {
cov_mark::hit!(rename_self_outside_of_methods);
let mut replacement_text = String::from(new_name);
let mut replacement_text = new_name;
replacement_text.push_str(": _");
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
@ -710,7 +753,7 @@ impl Foo {
check(
"super",
r#"fn main() { let i$0 = 1; }"#,
"error: Invalid name `super`: not an identifier",
"error: Invalid name `super`: cannot rename to a keyword",
);
}
@ -759,7 +802,11 @@ impl Foo {
#[test]
fn test_rename_mod_invalid_raw_ident() {
check("r#self", r#"mod foo$0 {}"#, "error: Invalid name `self`: not an identifier");
check(
"r#self",
r#"mod foo$0 {}"#,
"error: Invalid name `self`: cannot rename module to self",
);
}
#[test]
@ -2359,7 +2406,6 @@ fn foo(foo: Foo) {
#[test]
fn test_rename_lifetimes() {
cov_mark::check!(rename_lifetime);
check(
"'yeeee",
r#"
@ -2536,7 +2582,7 @@ fn baz() {
x.0$0 = 5;
}
"#,
"error: No identifier available to rename",
"error: No references found at position",
);
}
@ -2566,7 +2612,7 @@ impl Foo {
}
}
"#,
"error: Cannot rename `Self`",
"error: No references found at position",
);
}
@ -3259,6 +3305,102 @@ trait Trait<T> {
trait Trait<U> {
fn foo() -> impl use<U> Trait {}
}
"#,
);
}
#[test]
fn rename_macro_generated_type_from_type_with_a_suffix() {
check(
"Bar",
r#"
//- proc_macros: generate_suffixed_type
#[proc_macros::generate_suffixed_type]
struct Foo$0;
fn usage(_: FooSuffix) {}
usage(FooSuffix);
"#,
r#"
#[proc_macros::generate_suffixed_type]
struct Bar;
fn usage(_: BarSuffix) {}
usage(BarSuffix);
"#,
);
}
#[test]
// FIXME
#[should_panic]
fn rename_macro_generated_type_from_type_usage_with_a_suffix() {
check(
"Bar",
r#"
//- proc_macros: generate_suffixed_type
#[proc_macros::generate_suffixed_type]
struct Foo;
fn usage(_: FooSuffix) {}
usage(FooSuffix);
fn other_place() { Foo$0; }
"#,
r#"
#[proc_macros::generate_suffixed_type]
struct Bar;
fn usage(_: BarSuffix) {}
usage(BarSuffix);
fn other_place() { Bar; }
"#,
);
}
#[test]
fn rename_macro_generated_type_from_variant_with_a_suffix() {
check(
"Bar",
r#"
//- proc_macros: generate_suffixed_type
#[proc_macros::generate_suffixed_type]
enum Quux {
Foo$0,
}
fn usage(_: FooSuffix) {}
usage(FooSuffix);
"#,
r#"
#[proc_macros::generate_suffixed_type]
enum Quux {
Bar,
}
fn usage(_: BarSuffix) {}
usage(BarSuffix);
"#,
);
}
#[test]
// FIXME
#[should_panic]
fn rename_macro_generated_type_from_variant_usage_with_a_suffix() {
check(
"Bar",
r#"
//- proc_macros: generate_suffixed_type
#[proc_macros::generate_suffixed_type]
enum Quux {
Foo,
}
fn usage(_: FooSuffix) {}
usage(FooSuffix);
fn other_place() { Quux::Foo$0; }
"#,
r#"
#[proc_macros::generate_suffixed_type]
enum Quux {
Bar,
}
fn usage(_: BarSuffix) {}
usage(BarSuffix);
fn other_place() { Quux::Bar; }
"#,
);
}

View file

@ -562,8 +562,12 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
let m = p.start();
// test closure_binder
// fn main() { for<'a> || (); }
if p.at(T![for]) {
let b = p.start();
types::for_binder(p);
b.complete(p, CLOSURE_BINDER);
}
// test const_closure
// fn main() { let cl = const || _ = 0; }

View file

@ -201,6 +201,17 @@ fn type_bound(p: &mut Parser<'_>) -> bool {
}
if paths::is_use_path_start(p) {
types::path_type_bounds(p, false);
// test_err type_bounds_macro_call_recovery
// fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
if p.at(T![!]) {
let m = p.start();
p.bump(T![!]);
p.error("unexpected `!` in type path, macro calls are not allowed here");
if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
items::token_tree(p);
}
m.complete(p, ERROR);
}
} else {
m.abandon(p);
return false;

View file

@ -89,19 +89,22 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) -> Option<Completed
// test qual_paths
// type X = <A as B>::Output;
// fn foo() { <usize as Default>::default(); }
if first && p.eat(T![<]) {
if first && p.at(T![<]) {
let m = p.start();
p.bump(T![<]);
// test_err angled_path_without_qual
// type X = <()>;
// type Y = <A as B>;
types::type_(p);
if p.eat(T![as]) {
if is_use_path_start(p) {
types::path_type(p);
types::path_type_bounds(p, true);
} else {
p.error("expected a trait");
}
}
p.expect(T![>]);
m.complete(p, TYPE_ANCHOR);
if !p.at(T![::]) {
p.error("expected `::`");
}

View file

@ -330,15 +330,6 @@ fn bare_dyn_trait_type(p: &mut Parser<'_>) {
m.complete(p, DYN_TRAIT_TYPE);
}
// test path_type
// type A = Foo;
// type B = ::Foo;
// type C = self::Foo;
// type D = super::Foo;
pub(super) fn path_type(p: &mut Parser<'_>) {
path_type_bounds(p, true);
}
// test macro_call_type
// type A = foo!();
// type B = crate::foo!();
@ -365,6 +356,11 @@ fn path_or_macro_type(p: &mut Parser<'_>, allow_bounds: bool) {
}
}
// test path_type
// type A = Foo;
// type B = ::Foo;
// type C = self::Foo;
// type D = super::Foo;
pub(super) fn path_type_bounds(p: &mut Parser<'_>, allow_bounds: bool) {
assert!(paths::is_path_start(p));
let m = p.start();

View file

@ -291,6 +291,7 @@ pub enum SyntaxKind {
TUPLE_STRUCT_PAT,
TUPLE_TYPE,
TYPE_ALIAS,
TYPE_ANCHOR,
TYPE_ARG,
TYPE_BOUND,
TYPE_BOUND_LIST,
@ -463,6 +464,7 @@ impl SyntaxKind {
| TUPLE_STRUCT_PAT
| TUPLE_TYPE
| TYPE_ALIAS
| TYPE_ANCHOR
| TYPE_ARG
| TYPE_BOUND
| TYPE_BOUND_LIST

View file

@ -83,6 +83,10 @@ mod ok {
#[test]
fn cast_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/cast_expr.rs"); }
#[test]
fn closure_binder() {
run_and_expect_no_errors("test_data/parser/inline/ok/closure_binder.rs");
}
#[test]
fn closure_body_underscore_assignment() {
run_and_expect_no_errors(
"test_data/parser/inline/ok/closure_body_underscore_assignment.rs",
@ -872,6 +876,10 @@ mod err {
run_and_expect_errors("test_data/parser/inline/err/tuple_pat_leading_comma.rs");
}
#[test]
fn type_bounds_macro_call_recovery() {
run_and_expect_errors("test_data/parser/inline/err/type_bounds_macro_call_recovery.rs");
}
#[test]
fn type_in_array_recover() {
run_and_expect_errors("test_data/parser/inline/err/type_in_array_recover.rs");
}

View file

@ -186,13 +186,14 @@ SOURCE_FILE
TUPLE_EXPR
L_PAREN "("
CLOSURE_EXPR
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
CLOSURE_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
BIN_EXPR
BIN_EXPR

View file

@ -10,11 +10,12 @@ SOURCE_FILE
PATH_TYPE
PATH
PATH_SEGMENT
L_ANGLE "<"
TUPLE_TYPE
L_PAREN "("
R_PAREN ")"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
TUPLE_TYPE
L_PAREN "("
R_PAREN ")"
R_ANGLE ">"
SEMICOLON ";"
WHITESPACE "\n"
TYPE_ALIAS
@ -28,21 +29,22 @@ SOURCE_FILE
PATH_TYPE
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "A"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "B"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "A"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "B"
R_ANGLE ">"
SEMICOLON ";"
WHITESPACE "\n"
error 13: expected `::`

View file

@ -0,0 +1,112 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
GENERIC_PARAM_LIST
L_ANGLE "<"
TYPE_PARAM
NAME
IDENT "T"
COLON ":"
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
ERROR
BANG "!"
TOKEN_TREE
L_BRACK "["
R_BRACK "]"
COMMA ","
WHITESPACE " "
TYPE_PARAM
NAME
IDENT "T"
COLON ":"
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
ERROR
BANG "!"
COMMA ","
WHITESPACE " "
TYPE_PARAM
NAME
IDENT "T"
COLON ":"
WHITESPACE " "
TYPE_BOUND_LIST
TYPE_BOUND
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
ERROR
BANG "!"
TOKEN_TREE
L_CURLY "{"
R_CURLY "}"
R_ANGLE ">"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
RET_TYPE
THIN_ARROW "->"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Box"
GENERIC_ARG_LIST
L_ANGLE "<"
TYPE_ARG
DYN_TRAIT_TYPE
TYPE_BOUND_LIST
TYPE_BOUND
MACRO_TYPE
MACRO_CALL
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
BANG "!"
WHITESPACE " "
PLUS "+"
WHITESPACE " "
TYPE_BOUND
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
ERROR
BANG "!"
TOKEN_TREE
L_CURLY "{"
R_CURLY "}"
R_ANGLE ">"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
R_CURLY "}"
WHITESPACE "\n"
error 12: unexpected `!` in type path, macro calls are not allowed here
error 21: unexpected `!` in type path, macro calls are not allowed here
error 28: unexpected `!` in type path, macro calls are not allowed here
error 43: expected `{`, `[`, `(`
error 48: unexpected `!` in type path, macro calls are not allowed here

View file

@ -0,0 +1 @@
fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}

View file

@ -88,13 +88,14 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
@ -119,21 +120,22 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Trait"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -0,0 +1,36 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "main"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE " "
EXPR_STMT
CLOSURE_EXPR
CLOSURE_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PARAM_LIST
PIPE "|"
PIPE "|"
WHITESPACE " "
TUPLE_EXPR
L_PAREN "("
R_PAREN ")"
SEMICOLON ";"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"

View file

@ -0,0 +1 @@
fn main() { for<'a> || (); }
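The new `closure_binder` fixture above exercises syntax that is still unstable upstream; a minimal nightly-only sketch of what it covers (the feature gate and body are mine, not part of the fixture):

```rust
// Nightly-only: `for<'a> |...|` closures are gated behind the unstable
// `closure_lifetime_binder` feature; with a binder, every parameter type
// and the return type must be written out explicitly.
#![feature(closure_lifetime_binder)]

fn main() {
    let id = for<'a> |x: &'a str| -> &'a str { x };
    let _ = id("hi");
}
```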

View file

@ -202,13 +202,14 @@ SOURCE_FILE
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
CLOSURE_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
PARAM_LIST
PIPE "|"
@ -222,13 +223,14 @@ SOURCE_FILE
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
CLOSURE_BINDER
FOR_KW "for"
GENERIC_PARAM_LIST
L_ANGLE "<"
LIFETIME_PARAM
LIFETIME
LIFETIME_IDENT "'a"
R_ANGLE ">"
WHITESPACE " "
MOVE_KW "move"
WHITESPACE " "

View file

@ -11,21 +11,22 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "A"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "B"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "A"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "B"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
@ -51,21 +52,22 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "usize"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Default"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "usize"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Default"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -19,10 +19,11 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
INFER_TYPE
UNDERSCORE "_"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
INFER_TYPE
UNDERSCORE "_"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -84,21 +84,22 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Iterator"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Iterator"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -45,21 +45,22 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Iterator"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Iterator"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -107,13 +107,14 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
R_ANGLE ">"
TYPE_ANCHOR
L_ANGLE "<"
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Foo"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -288,26 +288,27 @@ SOURCE_FILE
PATH
PATH
PATH_SEGMENT
L_ANGLE "<"
REF_TYPE
AMP "&"
LIFETIME
LIFETIME_IDENT "'a"
TYPE_ANCHOR
L_ANGLE "<"
REF_TYPE
AMP "&"
LIFETIME
LIFETIME_IDENT "'a"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "T"
WHITESPACE " "
AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "Baz"
R_ANGLE ">"
IDENT "Baz"
R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF

View file

@ -10,6 +10,7 @@
use std::fmt;
use intern::Symbol;
use proc_macro::bridge;
mod token_stream;
@ -112,3 +113,135 @@ fn literal_kind_to_internal(kind: bridge::LitKind) -> tt::LitKind {
bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()),
}
}
pub(super) fn literal_from_str<Span: Copy>(
s: &str,
span: Span,
) -> Result<bridge::Literal<Span, Symbol>, ()> {
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
let mut tokens = rustc_lexer::tokenize(s);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
let lit = tokens.next().ok_or(())?;
if !matches!(
lit.kind,
TokenKind::Literal { kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, .. }
) {
return Err(());
}
lit
} else {
minus_or_lit
};
if tokens.next().is_some() {
return Err(());
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
let (kind, start_offset, end_offset) = match kind {
LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
LiteralKind::RawStr { n_hashes } => (
LitKind::StrRaw(n_hashes.unwrap_or_default()),
2 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawByteStr { n_hashes } => (
LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawCStr { n_hashes } => (
LitKind::CStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
};
let (lit, suffix) = s.split_at(suffix_start as usize);
let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix {
"" | "_" => None,
suffix => Some(Symbol::intern(suffix)),
};
Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span })
}
pub(super) fn from_token_tree<Span: Copy>(
tree: bridge::TokenTree<TokenStream<Span>, Span, Symbol>,
) -> TokenStream<Span> {
match tree {
bridge::TokenTree::Group(group) => {
let group = TopSubtree::from_bridge(group);
TokenStream { token_trees: group.0 }
}
bridge::TokenTree::Ident(ident) => {
let text = ident.sym;
let ident: tt::Ident<Span> = tt::Ident {
sym: text,
span: ident.span,
is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
};
let leaf = tt::Leaf::from(ident);
let tree = tt::TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Literal(literal) => {
let token_trees =
if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
let punct = tt::Punct {
spacing: tt::Spacing::Alone,
span: literal.span,
char: '-' as char,
};
let leaf: tt::Leaf<Span> = tt::Leaf::from(punct);
let minus_tree = tt::TokenTree::from(leaf);
let literal = tt::Literal {
symbol: Symbol::intern(symbol),
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![minus_tree, tree]
} else {
let literal = tt::Literal {
symbol: literal.symbol,
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![tree]
};
TokenStream { token_trees }
}
bridge::TokenTree::Punct(p) => {
let punct = tt::Punct {
char: p.ch as char,
spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
span: p.span,
};
let leaf = tt::Leaf::from(punct);
let tree = tt::TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
}
}
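The raw-literal arms above recover the symbol by slicing fixed prefixes and suffixes off the lexed text. A self-contained sanity check of that offset arithmetic for the raw-string case (the helper itself is `pub(super)`, so this re-derives the math instead of calling it):

```rust
// For r##"hi"##, rustc_lexer reports n_hashes = 2; the symbol is the text
// between the `r##"` prefix (2 + n_hashes bytes) and the `"##` terminator
// (1 + n_hashes bytes), matching the LitKind::StrRaw arm above.
fn raw_str_symbol(s: &str, n_hashes: usize) -> &str {
    let start = 2 + n_hashes; // `r` + `"` + hashes
    let end = 1 + n_hashes; // `"` + hashes
    &s[start..s.len() - end]
}

fn main() {
    assert_eq!(raw_str_symbol(r####"r##"hi"##"####, 2), "hi");
}
```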

View file

@ -14,16 +14,7 @@ use proc_macro::bridge::{self, server};
use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use tt::{TextRange, TextSize};
use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
mod tt {
pub use tt::*;
pub type TokenTree = ::tt::TokenTree<super::Span>;
pub type Leaf = ::tt::Leaf<super::Span>;
pub type Literal = ::tt::Literal<super::Span>;
pub type Punct = ::tt::Punct<super::Span>;
pub type Ident = ::tt::Ident<super::Span>;
}
use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
type TokenStream = crate::server_impl::TokenStream<Span>;
@ -62,66 +53,7 @@ impl server::FreeFunctions for RaSpanServer {
&mut self,
s: &str,
) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
let mut tokens = rustc_lexer::tokenize(s);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
let lit = tokens.next().ok_or(())?;
if !matches!(
lit.kind,
TokenKind::Literal {
kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
..
}
) {
return Err(());
}
lit
} else {
minus_or_lit
};
if tokens.next().is_some() {
return Err(());
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
let (kind, start_offset, end_offset) = match kind {
LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
LiteralKind::RawStr { n_hashes } => (
LitKind::StrRaw(n_hashes.unwrap_or_default()),
2 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawByteStr { n_hashes } => (
LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawCStr { n_hashes } => (
LitKind::CStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
};
let (lit, suffix) = s.split_at(suffix_start as usize);
let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix {
"" | "_" => None,
suffix => Some(Symbol::intern(suffix)),
};
Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
literal_from_str(s, self.call_site)
}
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
@ -149,70 +81,7 @@ impl server::TokenStream for RaSpanServer {
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
match tree {
bridge::TokenTree::Group(group) => {
let group = TopSubtree::from_bridge(group);
TokenStream { token_trees: group.0 }
}
bridge::TokenTree::Ident(ident) => {
let text = ident.sym;
let ident: tt::Ident = tt::Ident {
sym: text,
span: ident.span,
is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
};
let leaf = tt::Leaf::from(ident);
let tree = tt::TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Literal(literal) => {
let token_trees =
if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
let punct = tt::Punct {
spacing: tt::Spacing::Alone,
span: literal.span,
char: '-' as char,
};
let leaf: tt::Leaf = tt::Leaf::from(punct);
let minus_tree = tt::TokenTree::from(leaf);
let literal = tt::Literal {
symbol: Symbol::intern(symbol),
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![minus_tree, tree]
} else {
let literal = tt::Literal {
symbol: literal.symbol,
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![tree]
};
TokenStream { token_trees }
}
bridge::TokenTree::Punct(p) => {
let punct = tt::Punct {
char: p.ch as char,
spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
span: p.span,
};
let leaf = tt::Leaf::from(punct);
let tree = tt::TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
}
from_token_tree(tree)
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
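This hunk and the matching one in `token_id.rs` below delete the per-server copies of `literal_from_str`/`from_token_tree` in favor of the `Span: Copy`-generic helpers added earlier. The pattern, reduced to a toy (the types here are stand-ins, not the real span types):

```rust
// The two proc-macro servers differ only in their span type, so the
// shared logic is written once, generic over `Span: Copy`.
struct TokenStream<Span> {
    token_trees: Vec<Span>, // stand-in payload
}

fn singleton<Span: Copy>(span: Span) -> TokenStream<Span> {
    TokenStream { token_trees: vec![span] }
}

fn main() {
    let _ra = singleton(0u32); // stand-in for rust-analyzer's real spans
    let _token_id = singleton((0u32,)); // stand-in for plain token ids
}
```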

View file

@ -5,23 +5,9 @@ use std::ops::{Bound, Range};
use intern::Symbol;
use proc_macro::bridge::{self, server};
use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
mod tt {
pub use span::TokenId;
use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
pub use tt::*;
pub type TokenTree = ::tt::TokenTree<TokenId>;
pub type Leaf = ::tt::Leaf<TokenId>;
pub type Literal = ::tt::Literal<TokenId>;
pub type Punct = ::tt::Punct<TokenId>;
pub type Ident = ::tt::Ident<TokenId>;
}
type TokenTree = tt::TokenTree;
type Punct = tt::Punct;
type Spacing = tt::Spacing;
type Literal = tt::Literal;
type Span = tt::TokenId;
type Span = span::TokenId;
type TokenStream = crate::server_impl::TokenStream<Span>;
pub struct FreeFunctions;
@ -49,67 +35,7 @@ impl server::FreeFunctions for TokenIdServer {
&mut self,
s: &str,
) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
use proc_macro::bridge::LitKind;
use rustc_lexer::{LiteralKind, Token, TokenKind};
let mut tokens = rustc_lexer::tokenize(s);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
let lit = if minus_or_lit.kind == TokenKind::Minus {
let lit = tokens.next().ok_or(())?;
if !matches!(
lit.kind,
TokenKind::Literal {
kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
..
}
) {
return Err(());
}
lit
} else {
minus_or_lit
};
if tokens.next().is_some() {
return Err(());
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
let (kind, start_offset, end_offset) = match kind {
LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
LiteralKind::RawStr { n_hashes } => (
LitKind::StrRaw(n_hashes.unwrap_or_default()),
2 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawByteStr { n_hashes } => (
LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawCStr { n_hashes } => (
LitKind::CStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
};
let (lit, suffix) = s.split_at(suffix_start as usize);
let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix {
"" | "_" => None,
suffix => Some(Symbol::intern(suffix)),
};
Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
literal_from_str(s, self.call_site)
}
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
@ -135,69 +61,7 @@ impl server::TokenStream for TokenIdServer {
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
match tree {
bridge::TokenTree::Group(group) => {
let group = TopSubtree::from_bridge(group);
TokenStream { token_trees: group.0 }
}
bridge::TokenTree::Ident(ident) => {
let ident: tt::Ident = tt::Ident {
sym: ident.sym,
span: ident.span,
is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
};
let leaf = tt::Leaf::from(ident);
let tree = TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
bridge::TokenTree::Literal(literal) => {
let token_trees =
if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
let punct = tt::Punct {
spacing: tt::Spacing::Alone,
span: literal.span,
char: '-' as char,
};
let leaf: tt::Leaf = tt::Leaf::from(punct);
let minus_tree = tt::TokenTree::from(leaf);
let literal = Literal {
symbol: Symbol::intern(symbol),
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![minus_tree, tree]
} else {
let literal = Literal {
symbol: literal.symbol,
suffix: literal.suffix,
span: literal.span,
kind: literal_kind_to_internal(literal.kind),
};
let leaf: tt::Leaf = tt::Leaf::from(literal);
let tree = tt::TokenTree::from(leaf);
vec![tree]
};
TokenStream { token_trees }
}
bridge::TokenTree::Punct(p) => {
let punct = Punct {
char: p.ch as char,
spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
span: p.span,
};
let leaf = tt::Leaf::from(punct);
let tree = TokenTree::from(leaf);
TokenStream { token_trees: vec![tree] }
}
}
from_token_tree(tree)
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@ -337,6 +201,8 @@ impl server::Server for TokenIdServer {
#[cfg(test)]
mod tests {
use span::TokenId;
use super::*;
#[test]
@ -345,18 +211,18 @@ mod tests {
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("struct"),
span: tt::TokenId(0),
span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("T"),
span: tt::TokenId(0),
span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId(0),
close: tt::TokenId(0),
open: TokenId(0),
close: TokenId(0),
kind: tt::DelimiterKind::Brace,
},
len: 0,
@ -372,8 +238,8 @@ mod tests {
let subtree_paren_a = vec![
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId(0),
close: tt::TokenId(0),
open: TokenId(0),
close: TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
len: 1,
@ -381,24 +247,24 @@ mod tests {
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No,
sym: Symbol::intern("a"),
span: tt::TokenId(0),
span: TokenId(0),
})),
];
let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
let t1 = TokenStream::from_str("(a)", TokenId(0)).unwrap();
assert_eq!(t1.token_trees.len(), 2);
assert!(t1.token_trees[0..2] == subtree_paren_a);
let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
let t2 = TokenStream::from_str("(a);", TokenId(0)).unwrap();
assert_eq!(t2.token_trees.len(), 3);
assert!(t2.token_trees[0..2] == subtree_paren_a);
let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
let underscore = TokenStream::from_str("_", TokenId(0)).unwrap();
assert!(
underscore.token_trees[0]
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
span: tt::TokenId(0),
span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
}))
);

View file

@ -431,12 +431,6 @@ impl CargoWorkspace {
.ok_or(cargo_metadata::Error::NoJson)?;
Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
})()
.map(|(metadata, error)| {
(
metadata,
error.map(|e| e.context(format!("Failed to run `{:?}`", meta.cargo_command()))),
)
})
.with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
}
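The error decoration moves from mapping the inner `Option<Error>` to `anyhow::Context::with_context` on the outer `Result`, which also evaluates the message lazily. The same pattern in isolation (path and function are illustrative):

```rust
use anyhow::{Context, Result};

// `with_context` only runs the closure on the error path, so the
// `format!` cost is not paid on success.
fn read_config(path: &str) -> Result<String> {
    std::fs::read_to_string(path).with_context(|| format!("Failed to read `{path}`"))
}
```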

View file

@ -1192,7 +1192,7 @@ impl ConfigChange {
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LinkedProject {
ProjectManifest(ProjectManifest),
InlineJsonProject(ProjectJson),
InlineProjectJson(ProjectJson),
}
impl From<ProjectManifest> for LinkedProject {
@ -1203,7 +1203,7 @@ impl From<ProjectManifest> for LinkedProject {
impl From<ProjectJson> for LinkedProject {
fn from(v: ProjectJson) -> Self {
LinkedProject::InlineJsonProject(v)
LinkedProject::InlineProjectJson(v)
}
}
@ -1597,6 +1597,16 @@ impl Config {
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
}
}
pub fn diagnostic_fixes(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig {
// We always want to show quickfixes for diagnostics, even when diagnostics/experimental diagnostics are disabled.
DiagnosticsConfig {
enabled: true,
disable_experimental: false,
..self.diagnostics(source_root)
}
}
pub fn expand_proc_attr_macros(&self) -> bool {
self.procMacro_enable().to_owned() && self.procMacro_attributes_enable().to_owned()
}
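`diagnostic_fixes` above relies on struct-update syntax to force just two fields of the base config. A self-contained sketch of that idiom (field names other than the two overridden ones are made up):

```rust
#[derive(Clone)]
struct DiagnosticsConfig {
    enabled: bool,
    disable_experimental: bool,
    style_lints: bool, // hypothetical extra field, carried over unchanged
}

// `..base.clone()` fills in every field not listed explicitly.
fn for_fixes(base: &DiagnosticsConfig) -> DiagnosticsConfig {
    DiagnosticsConfig { enabled: true, disable_experimental: false, ..base.clone() }
}
```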

View file

@ -1439,7 +1439,7 @@ pub(crate) fn handle_code_action(
};
let assists = snap.analysis.assists_with_fixes(
&assists_config,
&snap.config.diagnostics(Some(source_root)),
&snap.config.diagnostic_fixes(Some(source_root)),
resolve,
frange,
)?;
@ -1530,7 +1530,7 @@ pub(crate) fn handle_code_action_resolve(
let assists = snap.analysis.assists_with_fixes(
&assists_config,
&snap.config.diagnostics(Some(source_root)),
&snap.config.diagnostic_fixes(Some(source_root)),
AssistResolveStrategy::Single(assist_resolve),
frange,
)?;

View file

@ -108,8 +108,7 @@ impl GlobalState {
/// edge users from being upset!
pub(crate) fn poke_rust_analyzer_developer(&mut self, message: String) {
let from_source_build = option_env!("POKE_RA_DEVS").is_some();
let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
if from_source_build || profiling_enabled {
if from_source_build {
self.show_and_log_error(message, None);
}
}

View file

@ -292,7 +292,7 @@ impl GlobalState {
if let (Some(_command), Some(path)) = (&discover_command, &path) {
let build = linked_projects.iter().find_map(|project| match project {
LinkedProject::InlineJsonProject(it) => it.crate_by_buildfile(path),
LinkedProject::InlineProjectJson(it) => it.crate_by_buildfile(path),
_ => None,
});
@ -318,7 +318,7 @@ impl GlobalState {
&progress,
)
}
LinkedProject::InlineJsonProject(it) => {
LinkedProject::InlineProjectJson(it) => {
let workspace = project_model::ProjectWorkspace::load_inline(
it.clone(),
&cargo_config,

View file

@ -975,10 +975,6 @@ version = \"0.0.0\"
}
fn out_dirs_check_impl(root_contains_symlink: bool) {
if skip_slow_tests() {
return;
}
let mut server = Project::with_fixture(
r###"
//- /Cargo.toml
@ -1130,12 +1126,18 @@ fn main() {
#[test]
fn out_dirs_check() {
if skip_slow_tests() {
return;
}
out_dirs_check_impl(false);
}
#[test]
#[cfg(not(windows))] // windows requires elevated permissions to create symlinks
fn root_contains_symlink_out_dirs_check() {
if skip_slow_tests() {
return;
}
out_dirs_check_impl(true);
}

View file

@ -439,6 +439,7 @@ assist.emitMustUse = true"#,
}
#[test]
#[ignore = "flaky test that tends to hang"]
fn ratoml_inherit_config_from_ws_root() {
if skip_slow_tests() {
return;

View file

@ -39,7 +39,10 @@ PathSegment =
| NameRef GenericArgList?
| NameRef ParenthesizedArgList RetType?
| NameRef ReturnTypeSyntax
| '<' Type ('as' PathType)? '>'
| TypeAnchor
TypeAnchor =
'<' Type ('as' PathType)? '>'
ReturnTypeSyntax =
'(' '..' ')'
@ -98,7 +101,7 @@ WhereClause =
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
//*************************//
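For reference, the shapes of paths whose leading segment the new `TypeAnchor` rule captures (examples mine, not from the grammar file):

```rust
fn demo() -> u8 {
    let v: Vec<u8> = <Vec<u8>>::new(); // `<Type>::item`
    let d = <u8 as Default>::default(); // `<Type as Trait>::item`
    type Item = <core::ops::Range<u8> as Iterator>::Item; // anchor inside a type path
    let x: Item = d;
    v.first().copied().unwrap_or(x)
}
```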

View file

@ -1232,21 +1232,13 @@ impl PathSegment {
support::child(&self.syntax)
}
#[inline]
pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
#[inline]
pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
#[inline]
pub fn return_type_syntax(&self) -> Option<ReturnTypeSyntax> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
pub fn type_anchor(&self) -> Option<TypeAnchor> { support::child(&self.syntax) }
#[inline]
pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
#[inline]
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
#[inline]
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
#[inline]
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
pub struct PathType {
pub(crate) syntax: SyntaxNode,
@ -1739,6 +1731,21 @@ impl TypeAlias {
#[inline]
pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
}
pub struct TypeAnchor {
pub(crate) syntax: SyntaxNode,
}
impl TypeAnchor {
#[inline]
pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
#[inline]
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
#[inline]
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
pub struct TypeArg {
pub(crate) syntax: SyntaxNode,
}
@ -7108,6 +7115,42 @@ impl fmt::Debug for TypeAlias {
f.debug_struct("TypeAlias").field("syntax", &self.syntax).finish()
}
}
impl AstNode for TypeAnchor {
#[inline]
fn kind() -> SyntaxKind
where
Self: Sized,
{
TYPE_ANCHOR
}
#[inline]
fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ANCHOR }
#[inline]
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
#[inline]
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl hash::Hash for TypeAnchor {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
}
impl Eq for TypeAnchor {}
impl PartialEq for TypeAnchor {
fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
}
impl Clone for TypeAnchor {
fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
}
impl fmt::Debug for TypeAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TypeAnchor").field("syntax", &self.syntax).finish()
}
}
impl AstNode for TypeArg {
#[inline]
fn kind() -> SyntaxKind
@ -10624,6 +10667,11 @@ impl std::fmt::Display for TypeAlias {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for TypeAnchor {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for TypeArg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)

View file

@ -276,18 +276,15 @@ impl ast::PathSegment {
_ => PathSegmentKind::Name(name_ref),
}
} else {
match self.syntax().first_child_or_token()?.kind() {
T![<] => {
// <T> or <T as Trait>
// T is any TypeRef, Trait has to be a PathType
let mut type_refs =
self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
let type_ref = type_refs.next().and_then(ast::Type::cast);
let trait_ref = type_refs.next().and_then(ast::PathType::cast);
PathSegmentKind::Type { type_ref, trait_ref }
}
_ => return None,
}
let anchor = self.type_anchor()?;
// FIXME: Move this over to `ast::TypeAnchor`
// <T> or <T as Trait>
// T is any TypeRef, Trait has to be a PathType
let mut type_refs =
anchor.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
let type_ref = type_refs.next().and_then(ast::Type::cast);
let trait_ref = type_refs.next().and_then(ast::PathType::cast);
PathSegmentKind::Type { type_ref, trait_ref }
};
Some(res)
}
@ -473,7 +470,7 @@ impl ast::Impl {
// [#15778](https://github.com/rust-lang/rust-analyzer/issues/15778)
impl ast::PathSegment {
pub fn qualifying_trait(&self) -> Option<ast::PathType> {
let mut path_types = support::children(self.syntax());
let mut path_types = support::children(self.type_anchor()?.syntax());
let first = path_types.next()?;
path_types.next().or(Some(first))
}

View file

@ -538,6 +538,21 @@ pub fn disallow_cfg(_attr: TokenStream, input: TokenStream) -> TokenStream {
disabled: false,
},
),
(
r#"
#[proc_macro_attribute]
pub fn generate_suffixed_type(_attr: TokenStream, input: TokenStream) -> TokenStream {
input
}
"#
.into(),
ProcMacro {
name: Symbol::intern("generate_suffixed_type"),
kind: ProcMacroKind::Attr,
expander: sync::Arc::new(GenerateSuffixedTypeProcMacroExpander),
disabled: false,
},
),
])
}
@ -919,3 +934,57 @@ impl ProcMacroExpander for DisallowCfgProcMacroExpander {
Ok(subtree.clone())
}
}
// Generates a new type by adding a suffix to the original name
#[derive(Debug)]
struct GenerateSuffixedTypeProcMacroExpander;
impl ProcMacroExpander for GenerateSuffixedTypeProcMacroExpander {
fn expand(
&self,
subtree: &TopSubtree,
_attrs: Option<&TopSubtree>,
_env: &Env,
_def_site: Span,
call_site: Span,
_mixed_site: Span,
_current_dir: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[1] else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
let ident = match ident.sym.as_str() {
"struct" => {
let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[2] else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
ident
}
"enum" => {
let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[4] else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
};
ident
}
_ => {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
}
};
let generated_ident = tt::Ident {
sym: Symbol::intern(&format!("{}Suffix", ident.sym)),
span: ident.span,
is_raw: tt::IdentIsRaw::No,
};
let ret = quote! { call_site =>
#subtree
struct #generated_ident;
};
Ok(ret)
}
}
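Read off the `quote!` body, the expansion this fixture expander produces should look like the following (my reconstruction, not captured test output):

```rust
// Input item, annotated with the attribute under test:
//
//     #[generate_suffixed_type]
//     struct Foo;
//
// Expansion: the original item, followed by the suffixed companion type.
struct Foo;
struct FooSuffix;
```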

View file

@ -5,7 +5,7 @@ build system, you'll have to describe the structure of your project for
rust-analyzer in the `rust-project.json` format:
```typescript
interface JsonProject {
interface ProjectJson {
/// Path to the sysroot directory.
///
/// The sysroot is where rustc looks for the

View file

@ -5500,9 +5500,9 @@
}
},
"node_modules/tar-fs": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz",
"integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz",
"integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==",
"dev": true,
"license": "MIT",
"optional": true,

View file

@ -1 +1 @@
a8e4c68dcb4dc1e48a0db294c5323cab0227fcb9
7c10378e1fee5ddc6573b916aeb884ab10e0de17