Merge pull request #19001 from ShoyuVanilla/default-field-values

feat: Implement `default-field-values`
Lukas Wirth 2025-01-27 11:45:07 +00:00 committed by GitHub
commit 6862329068
46 changed files with 905 additions and 94 deletions

View file

@ -85,6 +85,7 @@ pub struct FieldData {
pub name: Name,
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
pub has_default: bool,
}
fn repr_from_value(
@ -478,5 +479,6 @@ fn lower_field(
name: field.name.clone(),
type_ref: field.type_ref,
visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
has_default: field.has_default,
}
}
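
For context, `has_default` records whether a field declaration carries a `= <expr>` initializer. A minimal sketch of the source syntax this flag tracks (assuming the unstable `default_field_values` feature gate; the struct and field names are invented):

#![feature(default_field_values)]

struct Config {
    retries: u32 = 3, // has_default == true
    verbose: bool,    // has_default == false
}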

View file

@ -25,7 +25,7 @@ use crate::{
db::DefDatabase,
hir::{
Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
PatId, RecordFieldPat, Statement,
PatId, RecordFieldPat, Spread, Statement,
},
nameres::DefMap,
path::{ModPath, Path},
@ -362,7 +362,7 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
if let &Some(expr) = spread {
if let &Spread::Base(expr) = spread {
f(expr);
}
}
@ -490,7 +490,7 @@ impl ExpressionStore {
for field in fields.iter() {
f(field.expr);
}
if let &Some(expr) = spread {
if let &Spread::Base(expr) = spread {
f(expr);
}
}

View file

@ -122,6 +122,9 @@ impl Body {
src.map(|it| it.expr())
}
DefWithBodyId::InTypeConstId(c) => c.lookup(db).id.map(|_| c.source(db).expr()),
DefWithBodyId::FieldId(f) => {
f.record_field_source(db).map(|it| it.and_then(|it| it.expr()))
}
}
};
let module = def.module(db);

View file

@ -45,7 +45,7 @@ use crate::{
},
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
Expr, ExprId, Item, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability,
OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Spread, Statement,
},
item_scope::BuiltinShadowMode,
lang_item::LangItem,
@ -90,6 +90,7 @@ pub(super) fn lower_body(
DefWithBodyId::ConstId(it) => db.attrs(it.into()),
DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY,
DefWithBodyId::VariantId(it) => db.attrs(it.into()),
DefWithBodyId::FieldId(it) => db.attrs(it.into()),
}
.rust_analyzer_tool()
.any(|attr| *attr.path() == tool_path![skip]);
@ -168,6 +169,7 @@ pub(super) fn lower_body(
Awaitable::No("constant")
}
DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"),
DefWithBodyId::FieldId(..) => Awaitable::No("field"),
}
},
);
@ -600,10 +602,13 @@ impl ExprCollector<'_> {
Some(RecordLitField { name, expr })
})
.collect();
let spread = nfl.spread().map(|s| self.collect_expr(s));
let spread = nfl.spread().map(|s| self.collect_expr(s)).map_or_else(
|| if nfl.dotdot_token().is_some() { Spread::Yes } else { Spread::No },
Spread::Base,
);
Expr::RecordLit { path, fields, spread }
} else {
Expr::RecordLit { path, fields: Box::default(), spread: None }
Expr::RecordLit { path, fields: Box::default(), spread: Spread::No }
};
self.alloc_expr(record_lit, syntax_ptr)

View file

@ -8,9 +8,10 @@ use span::Edition;
use crate::{
hir::{
Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability,
Statement,
Spread, Statement,
},
pretty::{print_generic_args, print_path, print_type_ref},
VariantId,
};
use super::*;
@ -56,6 +57,32 @@ pub(super) fn print_body_hir(
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
)
}
DefWithBodyId::FieldId(it) => {
let parent_name: String = match it.parent {
VariantId::EnumVariantId(it) => {
let loc = it.lookup(db);
let enum_loc = loc.parent.lookup(db);
format!(
"{}::{}",
enum_loc.id.item_tree(db)[enum_loc.id.value]
.name
.display(db.upcast(), edition),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
)
}
VariantId::StructId(it) => it
.lookup(db)
.id
.resolved(db, |it| it.name.display(db.upcast(), edition).to_string()),
VariantId::UnionId(it) => it
.lookup(db)
.id
.resolved(db, |it| it.name.display(db.upcast(), edition).to_string()),
};
let variant_data = it.parent.variant_data(db);
let field_name = &variant_data.fields()[it.local_id].name;
format!("field {}.{}", parent_name, field_name.display(db.upcast(), edition),)
}
};
let mut p = Printer {
@ -385,10 +412,16 @@ impl Printer<'_> {
p.print_expr(field.expr);
wln!(p, ",");
}
if let Some(spread) = spread {
w!(p, "..");
p.print_expr(*spread);
wln!(p);
match spread {
Spread::No => {}
Spread::Yes => {
w!(p, "..");
}
Spread::Base(expr) => {
w!(p, "..");
p.print_expr(*expr);
wln!(p);
}
}
});
w!(self, "}}");

View file

@ -251,7 +251,7 @@ pub enum Expr {
RecordLit {
path: Option<Box<Path>>,
fields: Box<[RecordLitField]>,
spread: Option<ExprId>,
spread: Spread,
},
Field {
expr: ExprId,
@ -478,6 +478,13 @@ pub struct RecordLitField {
pub expr: ExprId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Spread {
No,
Yes,
Base(ExprId),
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Statement {
Let {
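
As a rough illustration of how the three `Spread` variants map onto record-literal syntax (same unstable feature as above; struct and bindings invented):

struct S { a: i32 = 1, b: i32 }

fn demo(base: S) {
    let _ = S { a: 0, b: 0 };   // Spread::No: no `..` at all
    let _ = S { b: 0, .. };     // Spread::Yes: bare `..`, remaining fields come from their defaults
    let _ = S { b: 0, ..base }; // Spread::Base(expr): functional update from `base`
}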

View file

@ -1006,6 +1006,7 @@ pub struct Field {
pub name: Name,
pub type_ref: TypeRefId,
pub visibility: RawVisibilityId,
pub has_default: bool,
}
#[derive(Debug, Clone, Eq, PartialEq)]

View file

@ -319,8 +319,9 @@ impl<'a> Ctx<'a> {
};
let visibility = self.lower_visibility(field);
let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
let has_default = field.expr().is_some();
Field { name, type_ref, visibility }
Field { name, type_ref, visibility, has_default }
}
fn lower_tuple_field(
@ -332,7 +333,7 @@ impl<'a> Ctx<'a> {
let name = Name::new_tuple_field(idx);
let visibility = self.lower_visibility(field);
let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
Field { name, type_ref, visibility }
Field { name, type_ref, visibility, has_default: false }
}
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {

View file

@ -135,7 +135,9 @@ impl Printer<'_> {
self.whitespace();
w!(self, "{{");
self.indented(|this| {
for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
for (idx, Field { name, type_ref, visibility, has_default: _ }) in
fields.iter().enumerate()
{
this.print_attrs_of(
AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
"\n",
@ -151,7 +153,9 @@ impl Printer<'_> {
FieldsShape::Tuple => {
w!(self, "(");
self.indented(|this| {
for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
for (idx, Field { name, type_ref, visibility, has_default: _ }) in
fields.iter().enumerate()
{
this.print_attrs_of(
AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
"\n",

View file

@ -55,6 +55,7 @@ pub mod visibility;
use intern::Interned;
pub use rustc_abi as layout;
use src::HasSource;
use triomphe::Arc;
#[cfg(test)]
@ -77,6 +78,7 @@ use hir_expand::{
builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
files::InFileWrapper,
impl_intern_lookup,
name::Name,
proc_macro::{CustomProcMacroExpander, ProcMacroKind},
@ -519,6 +521,41 @@ pub struct FieldId {
pub local_id: LocalFieldId,
}
impl FieldId {
pub fn record_field_source(
&self,
db: &dyn DefDatabase,
) -> InFileWrapper<HirFileId, Option<ast::RecordField>> {
let field_list = match self.parent {
crate::VariantId::EnumVariantId(it) => {
let s = it.lookup(db);
s.source(db).map(|it| {
it.field_list().and_then(|it| match it {
ast::FieldList::RecordFieldList(it) => Some(it),
_ => None,
})
})
}
crate::VariantId::StructId(it) => {
let s = it.lookup(db);
s.source(db).map(|it| {
it.field_list().and_then(|it| match it {
ast::FieldList::RecordFieldList(it) => Some(it),
_ => None,
})
})
}
crate::VariantId::UnionId(it) => {
let s = it.lookup(db);
s.source(db).map(|it| it.record_field_list())
}
};
field_list.map(|it| {
it.and_then(|it| it.fields().nth(self.local_id.into_raw().into_u32() as usize))
})
}
}
pub type LocalFieldId = Idx<data::adt::FieldData>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -686,6 +723,7 @@ pub enum TypeOwnerId {
TypeAliasId(TypeAliasId),
ImplId(ImplId),
EnumVariantId(EnumVariantId),
FieldId(FieldId),
}
impl TypeOwnerId {
@ -703,6 +741,11 @@ impl TypeOwnerId {
GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent))
}
TypeOwnerId::InTypeConstId(_) => return None,
TypeOwnerId::FieldId(it) => GenericDefId::AdtId(match it.parent {
VariantId::EnumVariantId(it) => AdtId::EnumId(it.lookup(db).parent),
VariantId::StructId(it) => it.into(),
VariantId::UnionId(it) => it.into(),
}),
})
}
}
@ -717,7 +760,8 @@ impl_from!(
TraitAliasId,
TypeAliasId,
ImplId,
EnumVariantId
EnumVariantId,
FieldId
for TypeOwnerId
);
@ -730,6 +774,7 @@ impl From<DefWithBodyId> for TypeOwnerId {
DefWithBodyId::ConstId(it) => it.into(),
DefWithBodyId::InTypeConstId(it) => it.into(),
DefWithBodyId::VariantId(it) => it.into(),
DefWithBodyId::FieldId(it) => it.into(),
}
}
}
@ -885,6 +930,7 @@ pub enum DefWithBodyId {
ConstId(ConstId),
InTypeConstId(InTypeConstId),
VariantId(EnumVariantId),
FieldId(FieldId),
}
impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId);
@ -905,6 +951,7 @@ impl DefWithBodyId {
// FIXME: stable rust doesn't allow generics in constants, but we should
// use `TypeOwnerId::as_generic_def_id` when it does.
DefWithBodyId::InTypeConstId(_) => None,
DefWithBodyId::FieldId(_) => None,
}
}
}
@ -1309,6 +1356,12 @@ impl HasModule for VariantId {
}
}
impl HasModule for FieldId {
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
self.parent.module(db)
}
}
impl HasModule for MacroId {
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
match *self {
@ -1332,6 +1385,7 @@ impl HasModule for TypeOwnerId {
TypeOwnerId::ImplId(it) => it.module(db),
TypeOwnerId::EnumVariantId(it) => it.module(db),
TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
TypeOwnerId::FieldId(it) => it.module(db),
}
}
}
@ -1344,6 +1398,7 @@ impl HasModule for DefWithBodyId {
DefWithBodyId::ConstId(it) => it.module(db),
DefWithBodyId::VariantId(it) => it.module(db),
DefWithBodyId::InTypeConstId(it) => it.lookup(db).owner.module(db),
DefWithBodyId::FieldId(it) => it.module(db),
}
}
}
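
A hedged sketch of how the new `FieldId::record_field_source` can be combined with `ast::RecordField::expr` (added further down in this diff) to reach a field's default-value expression; the helper name and bindings are hypothetical:

use hir_def::{db::DefDatabase, FieldId};
use syntax::ast;

// Hypothetical helper: the `= <expr>` initializer of a record field, if any.
fn default_expr_of(db: &dyn DefDatabase, field_id: FieldId) -> Option<ast::Expr> {
    // `record_field_source` yields the field's AST node when the parent has a
    // record field list; tuple fields produce `None`.
    field_id.record_field_source(db).value.and_then(|field| field.expr())
}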

View file

@ -211,6 +211,20 @@ enum Bar {
#[default]
Bar,
}
#[derive(Default)]
struct Baz {
field1: i32 = 2,
field2: bool = { false },
}
#[derive(Default)]
enum Qux {
#[default]
Foo {
field1: i32,
field2: bool = true,
field3: (),
}
}
"#,
expect![[r#"
#[derive(Default)]
@ -224,6 +238,20 @@ enum Bar {
#[default]
Bar,
}
#[derive(Default)]
struct Baz {
field1: i32 = 2,
field2: bool = { false },
}
#[derive(Default)]
enum Qux {
#[default]
Foo {
field1: i32,
field2: bool = true,
field3: (),
}
}
impl <> $crate::default::Default for Foo< > where {
fn default() -> Self {
@ -236,6 +264,20 @@ impl <> $crate::default::Default for Bar< > where {
fn default() -> Self {
Bar::Bar
}
}
impl <> $crate::default::Default for Baz< > where {
fn default() -> Self {
Baz {
..
}
}
}
impl <> $crate::default::Default for Qux< > where {
fn default() -> Self {
Qux::Foo {
field1: $crate::default::Default::default(), field3: $crate::default::Default::default(), ..
}
}
}"#]],
);
}

View file

@ -27,10 +27,11 @@ use crate::{
type_ref::{LifetimeRef, TypesMap},
visibility::{RawVisibility, Visibility},
AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId,
ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule,
ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup, Macro2Id,
MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
ExternBlockId, ExternCrateId, FieldId, FunctionId, FxIndexMap, GenericDefId, GenericParamId,
HasModule, ImplId, ItemContainerId, ItemTreeLoc, LifetimeParamId, LocalModuleId, Lookup,
Macro2Id, MacroId, MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId,
TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId,
VariantId,
};
#[derive(Debug, Clone)]
@ -1227,6 +1228,7 @@ impl HasResolver for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.resolver(db),
TypeOwnerId::ImplId(it) => it.resolver(db),
TypeOwnerId::EnumVariantId(it) => it.resolver(db),
TypeOwnerId::FieldId(it) => it.resolver(db),
}
}
}
@ -1239,6 +1241,7 @@ impl HasResolver for DefWithBodyId {
DefWithBodyId::StaticId(s) => s.resolver(db),
DefWithBodyId::VariantId(v) => v.resolver(db),
DefWithBodyId::InTypeConstId(c) => c.lookup(db).owner.resolver(db),
DefWithBodyId::FieldId(f) => f.resolver(db),
}
}
}
@ -1285,6 +1288,12 @@ impl HasResolver for VariantId {
}
}
impl HasResolver for FieldId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.parent.resolver(db)
}
}
impl HasResolver for MacroId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self {

View file

@ -80,9 +80,15 @@ pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander>
BuiltinDeriveExpander::find_by_name(ident)
}
#[derive(Clone, Copy)]
enum HasDefault {
Yes,
No,
}
#[derive(Clone)]
enum VariantShape {
Struct(Vec<tt::Ident>),
Struct(Vec<(tt::Ident, HasDefault)>),
Tuple(usize),
Unit,
}
@ -98,7 +104,7 @@ impl VariantShape {
fn field_names(&self, span: Span) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
VariantShape::Struct(s) => s.iter().map(|(ident, _)| ident.clone()).collect(),
VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
@ -112,7 +118,7 @@ impl VariantShape {
) -> tt::TopSubtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let fields = fields.iter().map(|(it, _)| {
let mapped = field_map(it);
quote! {span => #it : #mapped , }
});
@ -135,6 +141,63 @@ impl VariantShape {
}
}
fn default_expand(
&self,
path: tt::TopSubtree,
span: Span,
field_map: impl Fn(&tt::Ident) -> tt::TopSubtree,
) -> tt::TopSubtree {
match self {
VariantShape::Struct(fields) => {
let contains_default = fields.iter().any(|it| matches!(it.1, HasDefault::Yes));
let fields = fields
.iter()
.filter_map(|(it, has_default)| match has_default {
HasDefault::Yes => None,
HasDefault::No => Some(it),
})
.map(|it| {
let mapped = field_map(it);
quote! {span => #it : #mapped , }
});
if contains_default {
let mut double_dots =
tt::TopSubtreeBuilder::new(tt::Delimiter::invisible_spanned(span));
double_dots.push(tt::Leaf::Punct(tt::Punct {
char: '.',
spacing: tt::Spacing::Joint,
span,
}));
double_dots.push(tt::Leaf::Punct(tt::Punct {
char: '.',
spacing: tt::Spacing::Alone,
span,
}));
let double_dots = double_dots.build();
quote! {span =>
#path { ##fields #double_dots }
}
} else {
quote! {span =>
#path { ##fields }
}
}
}
&VariantShape::Tuple(n) => {
let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
quote! {span =>
#mapped ,
}
});
quote! {span =>
#path ( ##fields )
}
}
VariantShape::Unit => path,
}
}
fn from(
call_site: Span,
tm: &ExpansionSpanMap,
@ -144,8 +207,15 @@ impl VariantShape {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
it.fields()
.map(|it| it.name())
.map(|it| name_to_token(call_site, tm, it))
.map(|it| {
(
it.name(),
if it.expr().is_some() { HasDefault::Yes } else { HasDefault::No },
)
})
.map(|(it, has_default)| {
name_to_token(call_site, tm, it).map(|ident| (ident, has_default))
})
.collect::<Result<_, _>>()?,
),
Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
@ -601,7 +671,7 @@ fn default_expand(
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
fields.as_pattern_map(
fields.default_expand(
quote!(span =>#name),
span,
|_| quote!(span =>#krate::default::Default::default()),
@ -611,7 +681,7 @@ fn default_expand(
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
fields.default_expand(
quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
@ -643,7 +713,7 @@ fn debug_expand(
expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let for_fields = fields.iter().map(|(it, _)| {
let x_string = it.to_string();
quote! {span =>
.field(#x_string, & #it)
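
Restating what the expansion test earlier in this diff verifies: when every field of a struct has a default, the builtin `Default` derive now emits only a `..` spread, and for partially defaulted shapes it lists the non-defaulted fields followed by `..`. A hand-written approximation (the real expansion uses `$crate` paths):

struct Baz {
    field1: i32 = 2,
    field2: bool = { false },
}

// roughly what `#[derive(Default)]` expands to for `Baz`
impl core::default::Default for Baz {
    fn default() -> Self {
        Baz { .. } // every field has a default, so only `..` is emitted
    }
}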

View file

@ -8,6 +8,7 @@ use base_db::CrateId;
use chalk_solve::rust_ir::AdtKind;
use either::Either;
use hir_def::{
hir::Spread,
lang_item::LangItem,
resolver::{HasResolver, ValueNs},
AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
@ -546,9 +547,11 @@ pub fn record_literal_missing_fields(
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match expr {
Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()),
) -> Option<(VariantId, Vec<LocalFieldId>, /*has spread expr*/ bool)> {
let (fields, has_spread_expr, has_ellipsis) = match expr {
Expr::RecordLit { fields, spread, .. } => {
(fields, !matches!(spread, Spread::Base(_)), matches!(spread, Spread::Yes))
}
_ => return None,
};
@ -563,12 +566,18 @@ pub fn record_literal_missing_fields(
let missed_fields: Vec<LocalFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.filter_map(|(f, d)| {
if (has_ellipsis && d.has_default) || specified_fields.contains(&d.name) {
None
} else {
Some(f)
}
})
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
Some((variant_def, missed_fields, has_spread_expr))
}
pub fn record_pattern_missing_fields(
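
A hedged illustration of the updated missing-fields rule (the struct mirrors the diagnostics test added later in this PR): a defaulted field is no longer reported as missing once the literal ends in a bare `..`, while fields without defaults are still required:

struct F {
    field1: i32 = 4,
    field2: bool,
}

fn demo() {
    let _ = F { field2: true, .. }; // ok: `field1` comes from its default
    let _ = F { field2: true };     // error: missing `field1`
    let _ = F { .. };               // error: missing `field2` (it has no default)
}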

View file

@ -33,7 +33,8 @@ pub fn missing_unsafe(
DefWithBodyId::StaticId(_)
| DefWithBodyId::ConstId(_)
| DefWithBodyId::VariantId(_)
| DefWithBodyId::InTypeConstId(_) => false,
| DefWithBodyId::InTypeConstId(_)
| DefWithBodyId::FieldId(_) => false,
};
let body = db.body(def);

View file

@ -134,6 +134,9 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
.unwrap()
.0;
}
DefWithBodyId::FieldId(f) => {
ctx.collect_field(f);
}
}
ctx.infer_body();
@ -910,6 +913,19 @@ impl<'a> InferenceContext<'a> {
self.return_ty = return_ty;
}
fn collect_field(&mut self, field: FieldId) {
let variant_data = field.parent.variant_data(self.db.upcast());
let field_data = &variant_data.fields()[field.local_id];
let types_map = variant_data.types_map();
let return_ty =
self.make_ty(field_data.type_ref, types_map, InferenceTyDiagnosticSource::Signature);
// Field default value exprs might be defining usage sites of TAITs.
self.make_tait_coercion_table(iter::once(&return_ty));
self.return_ty = return_ty;
}
fn collect_fn(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
let mut param_tys =
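
The practical effect of `collect_field` is that a field's default-value expression is inferred with the field's declared type as the expected return type, so mismatches inside the initializer are reported. A small sketch, shaped like the test added further down in this PR:

struct Foo {
    foo: i32 = {
        let x = false;
        x // reported: expected i32, found bool
    },
}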

View file

@ -12,7 +12,7 @@ use hir_def::{
data::adt::VariantData,
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId,
Statement, UnaryOp,
Spread, Statement, UnaryOp,
},
lang_item::LangItem,
path::Path,
@ -796,7 +796,7 @@ impl InferenceContext<'_> {
self.consume_expr(expr);
}
Expr::RecordLit { fields, spread, .. } => {
if let &Some(expr) = spread {
if let &Spread::Base(expr) = spread {
self.consume_expr(expr);
}
self.consume_exprs(fields.iter().map(|it| it.expr));

View file

@ -10,7 +10,7 @@ use either::Either;
use hir_def::{
hir::{
ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, ExprOrPatId,
LabelId, Literal, Pat, PatId, Statement, UnaryOp,
LabelId, Literal, Pat, PatId, Spread, Statement, UnaryOp,
},
lang_item::{LangItem, LangItemTarget},
path::{GenericArg, GenericArgs, Path},
@ -775,7 +775,7 @@ impl InferenceContext<'_> {
}
}
}
if let Some(expr) = spread {
if let Spread::Base(expr) = spread {
self.infer_expr(*expr, &Expectation::has_type(ty.clone()), ExprIsRead::Yes);
}
ty
@ -1746,12 +1746,14 @@ impl InferenceContext<'_> {
});
}
TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
let local_id = self.db.struct_data(*s).variant_data.field(name)?;
let vd = &self.db.struct_data(*s).variant_data;
let local_id = vd.field(name)?;
let field = FieldId { parent: (*s).into(), local_id };
(field, parameters.clone())
}
TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
let local_id = self.db.union_data(*u).variant_data.field(name)?;
let vd = &self.db.union_data(*u).variant_data;
let local_id = vd.field(name)?;
let field = FieldId { parent: (*u).into(), local_id };
(field, parameters.clone())
}
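
A sketch of the expectation placed on a `Spread::Base` expression (names invented): the base of a functional-update literal is inferred against the type of the literal itself, so a base of the wrong type is a mismatch:

struct S { a: i32 = 1, b: i32 }

fn demo(other: S) {
    let _ = S { b: 2, ..other }; // `other` is checked against `S`
    // `S { b: 2, ..0u8 }` would be rejected: expected S, found u8
}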

View file

@ -4,8 +4,8 @@
use chalk_ir::{cast::Cast, Mutability};
use hir_def::{
hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement,
UnaryOp,
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Spread,
Statement, UnaryOp,
},
lang_item::LangItem,
};
@ -122,7 +122,11 @@ impl InferenceContext<'_> {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::RecordLit { path: _, fields, spread } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
let spread_expr = match spread {
Spread::Base(expr) => Some(*expr),
_ => None,
};
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(spread_expr))
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {

View file

@ -9,7 +9,7 @@ use hir_def::{
expr_store::{Body, HygieneId},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal,
LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField,
LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField, Spread,
},
lang_item::{LangItem, LangItemTarget},
path::Path,
@ -825,14 +825,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread } => {
let spread_place = match spread {
&Some(it) => {
&Spread::Base(it) => {
let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
return Ok(None);
};
current = c;
Some(p)
}
None => None,
_ => None,
};
let variant_id =
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
@ -870,12 +870,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
.map(|(i, it)| match it {
Some(it) => it,
None => {
let local_id =
LocalFieldId::from_raw(RawIdx::from(i as u32));
let p = sp.project(
ProjectionElem::Field(Either::Left(FieldId {
parent: variant_id,
local_id: LocalFieldId::from_raw(RawIdx::from(
i as u32,
)),
local_id,
})),
&mut self.result.projection_store,
);
@ -2130,6 +2130,10 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
db.enum_variant_data(it).name.display(db.upcast(), edition).to_string()
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
DefWithBodyId::FieldId(it) => it.parent.variant_data(db.upcast()).fields()[it.local_id]
.name
.display(db.upcast(), edition)
.to_string(),
};
let _p = tracing::info_span!("mir_body_query", ?detail).entered();
let body = db.body(def);

View file

@ -6,7 +6,7 @@ use std::{
};
use either::Either;
use hir_def::{expr_store::Body, hir::BindingId};
use hir_def::{expr_store::Body, hir::BindingId, VariantId};
use hir_expand::{name::Name, Lookup};
use la_arena::ArenaMap;
use span::Edition;
@ -79,6 +79,38 @@ impl MirBody {
hir_def::DefWithBodyId::InTypeConstId(id) => {
w!(this, "in type const {id:?} = ");
}
hir_def::DefWithBodyId::FieldId(id) => {
w!(this, "field ");
match id.parent {
VariantId::EnumVariantId(it) => {
let loc = it.lookup(db.upcast());
let enum_loc = loc.parent.lookup(db.upcast());
w!(
this,
"{}::{}",
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value]
.name
.display(db.upcast(), Edition::LATEST),
loc.id.item_tree(db.upcast())[loc.id.value]
.name
.display(db.upcast(), Edition::LATEST),
);
}
VariantId::StructId(id) => {
id.lookup(db.upcast()).id.resolved(db.upcast(), |it| {
w!(this, "{}", it.name.display(db.upcast(), Edition::LATEST));
});
}
VariantId::UnionId(id) => {
id.lookup(db.upcast()).id.resolved(db.upcast(), |it| {
w!(this, "{}", it.name.display(db.upcast(), Edition::LATEST));
});
}
};
let variant_data = id.parent.variant_data(db.upcast());
let field_name = &variant_data.fields()[id.local_id].name;
w!(this, ".{}: _ = ", field_name.display(db.upcast(), Edition::LATEST));
}
});
ctx.result
}

View file

@ -16,6 +16,7 @@ use std::env;
use std::sync::LazyLock;
use base_db::SourceDatabaseFileInputExt as _;
use either::Either;
use expect_test::Expect;
use hir_def::{
db::DefDatabase,
@ -23,12 +24,14 @@ use hir_def::{
hir::{ExprId, Pat, PatId},
item_scope::ItemScope,
nameres::DefMap,
src::HasSource,
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, SyntheticSyntax,
src::{HasChildSource, HasSource},
AdtId, AssocItemId, DefWithBodyId, FieldId, HasModule, LocalModuleId, Lookup, ModuleDefId,
SyntheticSyntax,
};
use hir_expand::{db::ExpandDatabase, FileRange, InFile};
use itertools::Itertools;
use rustc_hash::FxHashMap;
use span::TextSize;
use stdx::format_to;
use syntax::{
ast::{self, AstNode, HasName},
@ -132,14 +135,40 @@ fn check_impl(
None => continue,
};
let def_map = module.def_map(&db);
visit_module(&db, &def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
})
visit_module(&db, &def_map, module.local_id, &mut |it| match it {
ModuleDefId::FunctionId(it) => defs.push(it.into()),
ModuleDefId::EnumVariantId(it) => {
defs.push(it.into());
let variant_id = it.into();
let vd = db.variant_data(variant_id);
defs.extend(vd.fields().iter().filter_map(|(local_id, fd)| {
if fd.has_default {
let field = FieldId { parent: variant_id, local_id };
Some(DefWithBodyId::FieldId(field))
} else {
None
}
}));
}
ModuleDefId::ConstId(it) => defs.push(it.into()),
ModuleDefId::StaticId(it) => defs.push(it.into()),
ModuleDefId::AdtId(it) => {
let variant_id = match it {
AdtId::StructId(it) => it.into(),
AdtId::UnionId(it) => it.into(),
AdtId::EnumId(_) => return,
};
let vd = db.variant_data(variant_id);
defs.extend(vd.fields().iter().filter_map(|(local_id, fd)| {
if fd.has_default {
let field = FieldId { parent: variant_id, local_id };
Some(DefWithBodyId::FieldId(field))
} else {
None
}
}));
}
_ => {}
});
}
defs.sort_by_key(|def| match def {
@ -160,6 +189,14 @@ fn check_impl(
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
DefWithBodyId::FieldId(it) => {
let cs = it.parent.child_source(&db);
match cs.value.get(it.local_id) {
Some(Either::Left(it)) => it.syntax().text_range().start(),
Some(Either::Right(it)) => it.syntax().text_range().end(),
None => TextSize::new(u32::MAX),
}
}
});
let mut unexpected_type_mismatches = String::new();
for def in defs {
@ -388,14 +425,40 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let def_map = module.def_map(&db);
let mut defs: Vec<DefWithBodyId> = Vec::new();
visit_module(&db, &def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
})
visit_module(&db, &def_map, module.local_id, &mut |it| match it {
ModuleDefId::FunctionId(it) => defs.push(it.into()),
ModuleDefId::EnumVariantId(it) => {
defs.push(it.into());
let variant_id = it.into();
let vd = db.variant_data(variant_id);
defs.extend(vd.fields().iter().filter_map(|(local_id, fd)| {
if fd.has_default {
let field = FieldId { parent: variant_id, local_id };
Some(DefWithBodyId::FieldId(field))
} else {
None
}
}));
}
ModuleDefId::ConstId(it) => defs.push(it.into()),
ModuleDefId::StaticId(it) => defs.push(it.into()),
ModuleDefId::AdtId(it) => {
let variant_id = match it {
AdtId::StructId(it) => it.into(),
AdtId::UnionId(it) => it.into(),
AdtId::EnumId(_) => return,
};
let vd = db.variant_data(variant_id);
defs.extend(vd.fields().iter().filter_map(|(local_id, fd)| {
if fd.has_default {
let field = FieldId { parent: variant_id, local_id };
Some(DefWithBodyId::FieldId(field))
} else {
None
}
}));
}
_ => {}
});
defs.sort_by_key(|def| match def {
DefWithBodyId::FunctionId(it) => {
@ -415,6 +478,14 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::InTypeConstId(it) => it.source(&db).syntax().text_range().start(),
DefWithBodyId::FieldId(it) => {
let cs = it.parent.child_source(&db);
match cs.value.get(it.local_id) {
Some(Either::Left(it)) => it.syntax().text_range().start(),
Some(Either::Right(it)) => it.syntax().text_range().end(),
None => TextSize::new(u32::MAX),
}
}
});
for def in defs {
let (body, source_map) = db.body_with_source_map(def);
@ -475,7 +546,7 @@ pub(crate) fn visit_module(
let body = db.body(it.into());
visit_body(db, &body, cb);
}
ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => {
ModuleDefId::AdtId(AdtId::EnumId(it)) => {
db.enum_data(it).variants.iter().for_each(|&(it, _)| {
let body = db.body(it.into());
cb(it.into());

View file

@ -157,5 +157,53 @@ static ALIAS: i32 = {
217..218 '5': i32
205..211: expected impl Trait + ?Sized, got Struct
"#]],
)
);
}
#[test]
fn defining_type_alias_impl_trait_from_default_fields() {
check_no_mismatches(
r#"
trait Trait {}
struct Struct;
impl Trait for Struct {}
type AliasTy = impl Trait;
struct Foo {
foo: AliasTy = {
let x: AliasTy = Struct;
x
},
}
"#,
);
check_infer_with_mismatches(
r#"
trait Trait {}
struct Struct;
impl Trait for Struct {}
type AliasTy = impl Trait;
struct Foo {
foo: i32 = {
let x: AliasTy = Struct;
5
},
}
"#,
expect![[r#"
114..164 '{ ... }': i32
128..129 'x': impl Trait + ?Sized
141..147 'Struct': Struct
157..158 '5': i32
141..147: expected impl Trait + ?Sized, got Struct
"#]],
);
}

View file

@ -147,6 +147,7 @@ impl From<DefWithBody> for DefWithBodyId {
DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
DefWithBody::Variant(it) => DefWithBodyId::VariantId(it.into()),
DefWithBody::InTypeConst(it) => DefWithBodyId::InTypeConstId(it.id),
DefWithBody::Field(it) => DefWithBodyId::FieldId(it.into()),
}
}
}
@ -159,6 +160,7 @@ impl From<DefWithBodyId> for DefWithBody {
DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
DefWithBodyId::VariantId(it) => DefWithBody::Variant(it.into()),
DefWithBodyId::InTypeConstId(it) => DefWithBody::InTypeConst(it.into()),
DefWithBodyId::FieldId(it) => DefWithBody::Field(it.into()),
}
}
}

View file

@ -415,6 +415,28 @@ impl ModuleDef {
def.diagnostics(db, &mut acc);
}
let vd: Option<(VariantDef, Arc<VariantData>)> = match self {
ModuleDef::Adt(Adt::Struct(it)) => {
Some((it.into(), db.struct_data(it.id).variant_data.clone()))
}
ModuleDef::Adt(Adt::Union(it)) => {
Some((it.into(), db.union_data(it.id).variant_data.clone()))
}
ModuleDef::Variant(it) => {
Some((it.into(), db.enum_variant_data(it.id).variant_data.clone()))
}
_ => None,
};
if let Some((parent, vd)) = vd {
for (id, fd) in vd.fields().iter() {
if !fd.has_default {
continue;
}
let def: DefWithBody = DefWithBody::Field(Field { parent, id });
def.diagnostics(db, &mut acc, style_lints);
}
}
acc
}
@ -1226,6 +1248,12 @@ impl HasVisibility for Module {
}
}
impl From<&Field> for DefWithBodyId {
fn from(&f: &Field) -> Self {
DefWithBodyId::FieldId(f.into())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Field {
pub(crate) parent: VariantDef,
@ -1291,6 +1319,10 @@ impl AstNode for FieldSource {
}
impl Field {
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.parent.module(db)
}
pub fn name(&self, db: &dyn HirDatabase) -> Name {
self.parent.variant_data(db).fields()[self.id].name.clone()
}
@ -1353,6 +1385,14 @@ impl Field {
pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
self.parent
}
pub fn default_value_source(
&self,
db: &dyn HirDatabase,
) -> Option<InFileWrapper<HirFileId, ast::Expr>> {
let id: hir_def::FieldId = (*self).into();
id.record_field_source(db.upcast()).map(|it| it.and_then(|it| it.expr())).transpose()
}
}
impl HasVisibility for Field {
@ -1789,8 +1829,9 @@ pub enum DefWithBody {
Const(Const),
Variant(Variant),
InTypeConst(InTypeConst),
Field(Field),
}
impl_from!(Function, Const, Static, Variant, InTypeConst for DefWithBody);
impl_from!(Function, Const, Static, Variant, InTypeConst, Field for DefWithBody);
impl DefWithBody {
pub fn module(self, db: &dyn HirDatabase) -> Module {
@ -1800,6 +1841,7 @@ impl DefWithBody {
DefWithBody::Static(s) => s.module(db),
DefWithBody::Variant(v) => v.module(db),
DefWithBody::InTypeConst(c) => c.module(db),
DefWithBody::Field(f) => f.module(db),
}
}
@ -1810,6 +1852,7 @@ impl DefWithBody {
DefWithBody::Const(c) => c.name(db),
DefWithBody::Variant(v) => Some(v.name(db)),
DefWithBody::InTypeConst(_) => None,
DefWithBody::Field(f) => Some(f.name(db)),
}
}
@ -1825,6 +1868,7 @@ impl DefWithBody {
&DefWithBodyId::from(it.id).resolver(db.upcast()),
TyKind::Error.intern(Interner),
),
DefWithBody::Field(it) => it.ty(db),
}
}
@ -1835,6 +1879,7 @@ impl DefWithBody {
DefWithBody::Const(it) => it.id.into(),
DefWithBody::Variant(it) => it.into(),
DefWithBody::InTypeConst(it) => it.id.into(),
DefWithBody::Field(it) => it.into(),
}
}
@ -1880,6 +1925,23 @@ impl DefWithBody {
item_tree_source_maps.konst(konst.value)
}
DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => &TypesSourceMap::EMPTY,
DefWithBody::Field(field) => match field.parent {
VariantDef::Struct(strukt) => {
let strukt = strukt.id.lookup(db.upcast()).id;
item_tree_source_maps = strukt.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.strukt(strukt.value).item()
}
VariantDef::Union(union) => {
let union = union.id.lookup(db.upcast()).id;
item_tree_source_maps = union.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.union(union.value).item()
}
VariantDef::Variant(variant) => {
let variant = variant.id.lookup(db.upcast()).id;
item_tree_source_maps = variant.item_tree_with_source_map(db.upcast()).1;
item_tree_source_maps.variant(variant.value)
}
},
};
for (_, def_map) in body.blocks(db.upcast()) {
@ -2111,8 +2173,8 @@ impl DefWithBody {
DefWithBody::Static(it) => it.into(),
DefWithBody::Const(it) => it.into(),
DefWithBody::Variant(it) => it.into(),
// FIXME: don't ignore diagnostics for in type const
DefWithBody::InTypeConst(_) => return,
// FIXME: don't ignore diagnostics for in type const and default field value exprs
DefWithBody::InTypeConst(_) | DefWithBody::Field(_) => return,
};
for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) {
acc.push(diag.into())
@ -3237,7 +3299,10 @@ impl AsAssocItem for DefWithBody {
match self {
DefWithBody::Function(it) => it.as_assoc_item(db),
DefWithBody::Const(it) => it.as_assoc_item(db),
DefWithBody::Static(_) | DefWithBody::Variant(_) | DefWithBody::InTypeConst(_) => None,
DefWithBody::Static(_)
| DefWithBody::Variant(_)
| DefWithBody::InTypeConst(_)
| DefWithBody::Field(_) => None,
}
}
}
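
A hedged usage sketch of the new `Field::default_value_source` API, mirroring how the search-scope and `analysis-stats` code below call it; the helper name and bindings are hypothetical:

use hir::Field;
use syntax::{ast::AstNode, TextRange};

// Hypothetical helper: the text range of a field's `= <expr>` initializer, if any.
fn default_expr_range(db: &dyn hir::db::HirDatabase, field: Field) -> Option<TextRange> {
    field.default_value_source(db).map(|src| src.value.syntax().text_range())
}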

View file

@ -630,7 +630,8 @@ impl SourceAnalyzer {
let (adt, subst) = self.infer.as_ref()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
let variant = self.infer.as_ref()?.variant_resolution_for_expr_or_pat(expr_id)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
let local_id = variant_data.field(&local_name)?;
let field = FieldId { parent: variant, local_id };
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@ -651,7 +652,8 @@ impl SourceAnalyzer {
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
let local_id = variant_data.field(&field_name)?;
let field = FieldId { parent: variant, local_id };
let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
@ -1023,7 +1025,7 @@ impl SourceAnalyzer {
let expr_id = self.expr_id(db, &literal.clone().into())?;
let substs = infer[expr_id].as_adt()?.1;
let (variant, missing_fields, _exhaustive) = match expr_id {
let (variant, missing_fields, _) = match expr_id {
ExprOrPatId::ExprId(expr_id) => {
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?
}

View file

@ -972,6 +972,7 @@ impl TryFrom<DefWithBody> for Definition {
DefWithBody::Const(it) => Ok(it.into()),
DefWithBody::Variant(it) => Ok(it.into()),
DefWithBody::InTypeConst(_) => Err(()),
DefWithBody::Field(it) => Ok(it.into()),
}
}
}

View file

@ -310,6 +310,9 @@ impl Definition {
DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
// FIXME: implement
DefWithBody::InTypeConst(_) => return SearchScope::empty(),
DefWithBody::Field(f) => {
f.default_value_source(db).map(|src| src.syntax().cloned())
}
};
return match def {
Some(def) => SearchScope::file_range(
@ -327,6 +330,9 @@ impl Definition {
DefWithBody::Variant(v) => v.source(db).map(|src| src.syntax().cloned()),
// FIXME: implement
DefWithBody::InTypeConst(_) => return SearchScope::empty(),
DefWithBody::Field(f) => {
f.default_value_source(db).map(|src| src.syntax().cloned())
}
};
return match def {
Some(def) => SearchScope::file_range(

View file

@ -846,4 +846,35 @@ pub struct Claims {
"#,
);
}
#[test]
fn default_field_values() {
check_diagnostics(
r#"
struct F {
field1: i32 = 4,
field2: bool,
}
fn f() {
let _f = F {
field2: true,
..
};
let _f = F {
//^ 💡 error: missing structure fields:
//| - field1
field2: true,
};
let _f = F {
//^ 💡 error: missing structure fields:
//| - field2
..
};
}
"#,
);
}
}

View file

@ -1232,6 +1232,21 @@ fn f() {
let (_, _, _, ..) = (true, 42);
// ^^^^^^^^^^^^^ error: expected (bool, i32), found (bool, i32, {unknown})
}
"#,
);
}
#[test]
fn diagnostics_inside_field_default_expr() {
check_diagnostics(
r#"
struct Foo {
foo: i32 = {
let x = false;
x
// ^ error: expected i32, found bool
},
}
"#,
);
}

View file

@ -678,6 +678,8 @@ fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike
// S { x };
// S { x, y: 32, };
// S { x, y: 32, ..Default::default() };
// S { x, y: 32, .. };
// S { .. };
// S { x: ::default() };
// TupleStruct { 0: 1 };
// }
@ -709,6 +711,8 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// fn main() {
// S { field ..S::default() }
// S { 0 ..S::default() }
// S { field .. }
// S { 0 .. }
// }
name_ref_or_index(p);
p.error("expected `:`");
@ -739,7 +743,13 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// S { .. } = S {};
// }
// We permit `.. }` on the left-hand side of a destructuring assignment.
// test struct_initializer_with_defaults
// fn foo() {
// let _s = S { .. };
// }
// We permit `.. }` on the left-hand side of a destructuring assignment
// or default values.
if !p.at(T!['}']) {
expr(p);
@ -750,6 +760,12 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// S { ..x, a: 0 }
// }
// test_err comma_after_default_values_syntax
// fn foo() {
// S { .., };
// S { .., a: 0 }
// }
// Do not bump, so we can support additional fields after this comma.
p.error("cannot use a comma after the base struct");
}
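
In short, the record-expression parser now accepts a bare `..` with nothing after it, while still reporting an error for a comma following it. Roughly (forms taken from the parser tests added below):

struct S { x: i32 = 0, y: i32 = 1 }

fn demo() {
    let _ = S { x: 0, .. }; // accepted: `y` comes from its default
    let _ = S { .. };       // accepted
    // `S { .., x: 0 }` still parses, but with the error
    // "cannot use a comma after the base struct"
}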

View file

@ -135,6 +135,11 @@ pub(crate) fn record_field_list(p: &mut Parser<'_>) {
name(p);
p.expect(T![:]);
types::type_(p);
// test record_field_default_values
// struct S { f: f32 = 0.0 }
if p.eat(T![=]) {
expressions::expr(p);
}
m.complete(p, RECORD_FIELD);
} else {
m.abandon(p);

View file

@ -482,6 +482,10 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_attrs.rs");
}
#[test]
fn record_field_default_values() {
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_default_values.rs");
}
#[test]
fn record_field_list() {
run_and_expect_no_errors("test_data/parser/inline/ok/record_field_list.rs");
}
@ -544,6 +548,10 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/stmt_postfix_expr_ambiguity.rs");
}
#[test]
fn struct_initializer_with_defaults() {
run_and_expect_no_errors("test_data/parser/inline/ok/struct_initializer_with_defaults.rs");
}
#[test]
fn struct_item() { run_and_expect_no_errors("test_data/parser/inline/ok/struct_item.rs"); }
#[test]
fn trait_alias() { run_and_expect_no_errors("test_data/parser/inline/ok/trait_alias.rs"); }
@ -719,6 +727,10 @@ mod err {
);
}
#[test]
fn comma_after_default_values_syntax() {
run_and_expect_errors("test_data/parser/inline/err/comma_after_default_values_syntax.rs");
}
#[test]
fn crate_visibility_empty_recover() {
run_and_expect_errors("test_data/parser/inline/err/crate_visibility_empty_recover.rs");
}

View file

@ -0,0 +1,59 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
ERROR
COMMA ","
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
ERROR
COMMA ","
WHITESPACE " "
RECORD_EXPR_FIELD
NAME_REF
IDENT "a"
COLON ":"
WHITESPACE " "
LITERAL
INT_NUMBER "0"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 21: expected expression
error 36: expected expression
error 37: expected COMMA

View file

@ -0,0 +1,4 @@
fn foo() {
S { .., };
S { .., a: 0 }
}

View file

@ -44,6 +44,56 @@ SOURCE_FILE
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
RECORD_EXPR_FIELD
NAME_REF
INT_NUMBER "0"
WHITESPACE " "
DOT2 ".."
CALL_EXPR
PATH_EXPR
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "default"
ARG_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
RECORD_EXPR_FIELD
NAME_REF
IDENT "field"
WHITESPACE " "
DOT2 ".."
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n "
RECORD_EXPR
PATH
PATH_SEGMENT
@ -58,20 +108,6 @@ SOURCE_FILE
INT_NUMBER "0"
WHITESPACE " "
DOT2 ".."
CALL_EXPR
PATH_EXPR
PATH
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
COLON2 "::"
PATH_SEGMENT
NAME_REF
IDENT "default"
ARG_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"
@ -82,3 +118,9 @@ error 25: expected COMMA
error 42: expected SEMICOLON
error 52: expected `:`
error 52: expected COMMA
error 69: expected SEMICOLON
error 83: expected `:`
error 83: expected COMMA
error 88: expected SEMICOLON
error 98: expected `:`
error 98: expected COMMA

View file

@ -1,4 +1,6 @@
fn main() {
S { field ..S::default() }
S { 0 ..S::default() }
S { field .. }
S { 0 .. }
}

View file

@ -0,0 +1,28 @@
SOURCE_FILE
STRUCT
STRUCT_KW "struct"
WHITESPACE " "
NAME
IDENT "S"
WHITESPACE " "
RECORD_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
RECORD_FIELD
NAME
IDENT "f"
COLON ":"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
IDENT "f32"
WHITESPACE " "
EQ "="
WHITESPACE " "
LITERAL
FLOAT_NUMBER "0.0"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"

View file

@ -0,0 +1 @@
struct S { f: f32 = 0.0 }

View file

@ -120,6 +120,53 @@ SOURCE_FILE
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
RECORD_EXPR_FIELD
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "x"
COMMA ","
WHITESPACE " "
RECORD_EXPR_FIELD
NAME_REF
IDENT "y"
COLON ":"
WHITESPACE " "
LITERAL
INT_NUMBER "32"
COMMA ","
WHITESPACE " "
DOT2 ".."
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH

View file

@ -3,6 +3,8 @@ fn foo() {
S { x };
S { x, y: 32, };
S { x, y: 32, ..Default::default() };
S { x, y: 32, .. };
S { .. };
S { x: ::default() };
TupleStruct { 0: 1 };
}

View file

@ -0,0 +1,39 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
WHITESPACE " "
IDENT_PAT
NAME
IDENT "_s"
WHITESPACE " "
EQ "="
WHITESPACE " "
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"

View file

@ -0,0 +1,3 @@
fn foo() {
let _s = S { .. };
}

View file

@ -673,6 +673,9 @@ impl flags::AnalysisStats {
DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
DefWithBody::InTypeConst(_) => unimplemented!(),
DefWithBody::Field(it) => {
it.default_value_source(db).map(|it| it.syntax().cloned())
}
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);
@ -987,6 +990,9 @@ impl flags::AnalysisStats {
DefWithBody::Const(it) => it.source(db).map(|it| it.syntax().cloned()),
DefWithBody::Variant(it) => it.source(db).map(|it| it.syntax().cloned()),
DefWithBody::InTypeConst(_) => unimplemented!(),
DefWithBody::Field(it) => {
it.default_value_source(db).map(|it| it.syntax().cloned())
}
};
if let Some(src) = source {
let original_file = src.file_id.original_file(db);

View file

@ -241,7 +241,7 @@ RecordFieldList =
RecordField =
Attr* Visibility?
Name ':' Type
Name ':' Type ('=' Expr)?
TupleFieldList =
'(' fields:(TupleField (',' TupleField)* ','?)? ')'

View file

@ -1538,10 +1538,14 @@ impl ast::HasDocComments for RecordField {}
impl ast::HasName for RecordField {}
impl ast::HasVisibility for RecordField {}
impl RecordField {
#[inline]
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
#[inline]
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
#[inline]
pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
#[inline]
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
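
Finally, a hedged sketch of using the regenerated `ast::RecordField` accessors to detect a default value at the syntax level (the helper name is invented):

use syntax::ast;

// `field` is an `ast::RecordField`, e.g. parsed from `struct S { f: f32 = 0.0 }`.
fn has_default_value(field: &ast::RecordField) -> bool {
    field.eq_token().is_some() && field.expr().is_some()
}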