Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

Benjamin Coenen 2020-04-14 19:20:30 +02:00
commit b092bbc83d
25 changed files with 557 additions and 145 deletions

View file

@@ -1,7 +1,7 @@
use std::iter::successors;
use ra_syntax::{
algo::{neighbor, SyntaxRewriter},
algo::{neighbor, skip_trivia_token, SyntaxRewriter},
ast::{self, edit::AstNodeEdit, make},
AstNode, Direction, InsertPosition, SyntaxElement, T,
};
@@ -72,9 +72,18 @@ fn try_merge_trees(old: &ast::UseTree, new: &ast::UseTree) -> Option<ast::UseTre
let lhs = old.split_prefix(&lhs_prefix);
let rhs = new.split_prefix(&rhs_prefix);
let should_insert_comma = lhs
.use_tree_list()?
.r_curly_token()
.and_then(|it| skip_trivia_token(it.prev_token()?, Direction::Prev))
.map(|it| it.kind() != T![,])
.unwrap_or(true);
let mut to_insert: Vec<SyntaxElement> = Vec::new();
if should_insert_comma {
to_insert.push(make::token(T![,]).into());
to_insert.push(make::tokens::single_space().into());
}
to_insert.extend(
rhs.use_tree_list()?
.syntax()
@@ -247,4 +256,22 @@ use {
",
);
}
#[test]
fn test_double_comma() {
check_assist(
merge_imports,
r"
use foo::bar::baz;
use foo::<|>{
FooBar,
};
",
r"
use foo::{<|>
FooBar,
bar::baz};
",
)
}
}
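A minimal standalone sketch of the comma check introduced above (plain token kinds instead of the real ra_syntax tokens; `Kind` and the slice argument of `should_insert_comma` are illustrative): walk backwards from the closing `}` of the existing use-tree list, skip trivia, and splice in `", "` only when the last significant token is not already a comma, which is exactly what the `test_double_comma` case exercises.

    #[derive(Clone, Copy, PartialEq)]
    enum Kind { Comma, Whitespace, Comment, Ident }

    // Walk backwards from the `}` of the existing use-tree list, skip trivia
    // (whitespace/comments), and require a `,` only when the last significant
    // token is not already a comma.
    fn should_insert_comma(tokens_before_r_curly: &[Kind]) -> bool {
        tokens_before_r_curly
            .iter()
            .rev()
            .copied()
            .find(|k| !matches!(*k, Kind::Whitespace | Kind::Comment))
            .map(|k| k != Kind::Comma)
            .unwrap_or(true)
    }

    fn main() {
        // `use foo::{ FooBar, };` -- trailing comma already present, nothing to add.
        assert!(!should_insert_comma(&[Kind::Ident, Kind::Comma, Kind::Whitespace]));
        // `use foo::{ FooBar };` -- a `, ` must be spliced in first.
        assert!(should_insert_comma(&[Kind::Ident, Kind::Whitespace]));
        // `use foo::{ FooBar /* re-export */ };` -- comments count as trivia too.
        assert!(should_insert_comma(&[Kind::Ident, Kind::Comment]));
    }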

View file

@@ -1,20 +1,20 @@
use std::collections::HashMap;
use itertools::Itertools;
use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct};
use itertools::Itertools;
use ra_ide_db::RootDatabase;
use ra_syntax::{
algo, ast,
ast::{Name, Path, RecordLit, RecordPat},
AstNode, SyntaxKind, SyntaxNode,
algo,
ast::{self, Path, RecordLit, RecordPat},
match_ast, AstNode, SyntaxKind,
SyntaxKind::*,
SyntaxNode,
};
use crate::{
assist_ctx::{Assist, AssistCtx},
AssistId,
};
use ra_syntax::ast::{Expr, NameRef};
// Assist: reorder_fields
//
@@ -59,7 +59,6 @@ fn reorder<R: AstNode>(ctx: AssistCtx) -> Option<Assist> {
}
fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
use SyntaxKind::*;
match node.kind() {
RECORD_LIT => vec![RECORD_FIELD],
RECORD_PAT => vec![RECORD_FIELD_PAT, BIND_PAT],
@@ -68,19 +67,14 @@ fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
}
fn get_field_name(node: &SyntaxNode) -> String {
use SyntaxKind::*;
match node.kind() {
RECORD_FIELD => {
if let Some(name) = node.children().find_map(NameRef::cast) {
return name.to_string();
}
node.children().find_map(Expr::cast).map(|expr| expr.to_string()).unwrap_or_default()
}
BIND_PAT | RECORD_FIELD_PAT => {
node.children().find_map(Name::cast).map(|n| n.to_string()).unwrap_or_default()
}
_ => String::new(),
let res = match_ast! {
match node {
ast::RecordField(field) => field.field_name().map(|it| it.to_string()),
ast::RecordFieldPat(field) => field.field_name().map(|it| it.to_string()),
_ => None,
}
};
res.unwrap_or_default()
}
fn get_fields(record: &SyntaxNode) -> Vec<SyntaxNode> {
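The rewritten `get_field_name` feeds a simple ranking step: fields in the record literal or pattern are reordered to follow the order of the struct definition. A standalone sketch of that ranking over plain strings (`reorder_by_definition` and its signature are illustrative, not the assist's real API):

    use std::collections::HashMap;

    // Sort the fields of a record literal/pattern into struct-definition order.
    fn reorder_by_definition(definition_order: &[&str], fields: &mut Vec<String>) {
        let rank: HashMap<&str, usize> =
            definition_order.iter().enumerate().map(|(i, name)| (*name, i)).collect();
        // Unknown names (e.g. typos in the source) simply sort last.
        fields.sort_by_key(|name| rank.get(name.as_str()).copied().unwrap_or(usize::MAX));
    }

    fn main() {
        let mut fields = vec!["y".to_string(), "x".to_string()];
        reorder_by_definition(&["x", "y"], &mut fields);
        assert_eq!(fields, ["x", "y"]);
    }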

View file

@@ -33,6 +33,7 @@ use crate::{
};
use super::{ExprSource, PatSource};
use ast::AstChildren;
pub(super) fn lower(
db: &dyn DefDatabase,
@@ -598,8 +599,8 @@ impl ExprCollector<'_> {
}
ast::Pat::TupleStructPat(p) => {
let path = p.path().and_then(|path| self.expander.parse_path(path));
let args = p.args().map(|p| self.collect_pat(p)).collect();
Pat::TupleStruct { path, args }
let (args, ellipsis) = self.collect_tuple_pat(p.args());
Pat::TupleStruct { path, args, ellipsis }
}
ast::Pat::RefPat(p) => {
let pat = self.collect_pat_opt(p.pat());
@@ -616,10 +617,10 @@ impl ExprCollector<'_> {
}
ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()),
ast::Pat::TuplePat(p) => {
let args = p.args().map(|p| self.collect_pat(p)).collect();
Pat::Tuple(args)
let (args, ellipsis) = self.collect_tuple_pat(p.args());
Pat::Tuple { args, ellipsis }
}
ast::Pat::PlaceholderPat(_) | ast::Pat::DotDotPat(_) => Pat::Wild,
ast::Pat::PlaceholderPat(_) => Pat::Wild,
ast::Pat::RecordPat(p) => {
let path = p.path().and_then(|path| self.expander.parse_path(path));
let record_field_pat_list =
@@ -637,7 +638,7 @@ impl ExprCollector<'_> {
let iter = record_field_pat_list.record_field_pats().filter_map(|f| {
let ast_pat = f.pat()?;
let pat = self.collect_pat(ast_pat);
let name = f.name()?.as_name();
let name = f.field_name()?.as_name();
Some(RecordFieldPat { name, pat })
});
fields.extend(iter);
@@ -665,6 +666,9 @@ impl ExprCollector<'_> {
Pat::Missing
}
}
ast::Pat::DotDotPat(_) => unreachable!(
"`DotDotPat` requires special handling and should not be mapped to a Pat."
),
// FIXME: implement
ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing,
};
@@ -679,6 +683,19 @@ impl ExprCollector<'_> {
self.missing_pat()
}
}
fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Vec<PatId>, Option<usize>) {
// Find the location of the `..`, if there is one. Note that we do not
// consider the possibility of there being multiple `..` here.
let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::DotDotPat(_)));
// We want to skip the `..` pattern here, since we account for it above.
let args = args
.filter(|p| !matches!(p, ast::Pat::DotDotPat(_)))
.map(|p| self.collect_pat(p))
.collect();
(args, ellipsis)
}
}
impl From<ast::BinOp> for BinaryOp {
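The new `collect_tuple_pat` records where the `..` sits and drops it from the collected sub-patterns, so `(a, .., z)` lowers to `Pat::Tuple { args: [a, z], ellipsis: Some(1) }`. A standalone sketch of that split over plain strings (the real code works on `ast::Pat` and `PatId`; `split_tuple_pat` is illustrative):

    // Mirror of `collect_tuple_pat` over plain strings: record where the first
    // `..` sits, and drop it from the collected sub-patterns.
    fn split_tuple_pat(elements: &[&str]) -> (Vec<String>, Option<usize>) {
        let ellipsis = elements.iter().position(|e| *e == "..");
        let args =
            elements.iter().filter(|e| **e != "..").map(|e| e.to_string()).collect();
        (args, ellipsis)
    }

    fn main() {
        // `(a, .., z)` lowers to args `[a, z]` with `ellipsis = Some(1)`.
        assert_eq!(
            split_tuple_pat(&["a", "..", "z"]),
            (vec!["a".to_string(), "z".to_string()], Some(1))
        );
        // `(a, b)` keeps everything and records no ellipsis.
        assert_eq!(
            split_tuple_pat(&["a", "b"]),
            (vec!["a".to_string(), "b".to_string()], None)
        );
    }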

View file

@@ -374,7 +374,7 @@ pub struct RecordFieldPat {
pub enum Pat {
Missing,
Wild,
Tuple(Vec<PatId>),
Tuple { args: Vec<PatId>, ellipsis: Option<usize> },
Or(Vec<PatId>),
Record { path: Option<Path>, args: Vec<RecordFieldPat>, ellipsis: bool },
Range { start: ExprId, end: ExprId },
@@ -382,7 +382,7 @@ pub enum Pat {
Path(Path),
Lit(ExprId),
Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
TupleStruct { path: Option<Path>, args: Vec<PatId> },
TupleStruct { path: Option<Path>, args: Vec<PatId>, ellipsis: Option<usize> },
Ref { pat: PatId, mutability: Mutability },
}
@@ -393,7 +393,7 @@ impl Pat {
Pat::Bind { subpat, .. } => {
subpat.iter().copied().for_each(f);
}
Pat::Or(args) | Pat::Tuple(args) | Pat::TupleStruct { args, .. } => {
Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(f);
}
Pat::Ref { pat, .. } => f(*pat),

View file

@@ -83,6 +83,15 @@ impl AsName for ast::Name {
}
}
impl AsName for ast::NameOrNameRef {
fn as_name(&self) -> Name {
match self {
ast::NameOrNameRef::Name(it) => it.as_name(),
ast::NameOrNameRef::NameRef(it) => it.as_name(),
}
}
}
impl AsName for tt::Ident {
fn as_name(&self) -> Name {
Name::resolve(&self.text)

View file

@@ -289,7 +289,7 @@ impl PatStack {
Self::from_slice(&self.0[1..])
}
fn replace_head_with(&self, pat_ids: &[PatId]) -> PatStack {
fn replace_head_with<T: Into<PatIdOrWild> + Copy>(&self, pat_ids: &[T]) -> PatStack {
let mut patterns: PatStackInner = smallvec![];
for pat in pat_ids {
patterns.push((*pat).into());
@@ -320,12 +320,14 @@ impl PatStack {
constructor: &Constructor,
) -> MatchCheckResult<Option<PatStack>> {
let result = match (self.head().as_pat(cx), constructor) {
(Pat::Tuple(ref pat_ids), Constructor::Tuple { arity }) => {
debug_assert_eq!(
pat_ids.len(),
*arity,
"we type check before calling this code, so we should never hit this case",
);
(Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => {
if ellipsis.is_some() {
// If there is an ellipsis here, we should add the correct number of
// Pat::Wild patterns to `pat_ids`. We should be able to use the
// constructor's arity for this, but at the time of writing we aren't
// correctly calculating this arity when an ellipsis is present.
return Err(MatchCheckErr::NotImplemented);
}
Some(self.replace_head_with(pat_ids))
}
@@ -351,19 +353,47 @@ impl PatStack {
Some(self.to_tail())
}
}
(Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => {
(
Pat::TupleStruct { args: ref pat_ids, ellipsis, .. },
Constructor::Enum(enum_constructor),
) => {
let pat_id = self.head().as_id().expect("we know this isn't a wild");
if !enum_variant_matches(cx, pat_id, *enum_constructor) {
None
} else {
// If the enum variant matches, then we need to confirm
// that the number of patterns aligns with the expected
// number of patterns for that enum variant.
if pat_ids.len() != constructor.arity(cx)? {
return Err(MatchCheckErr::MalformedMatchArm);
let constructor_arity = constructor.arity(cx)?;
if let Some(ellipsis_position) = ellipsis {
// If there is an ellipsis in the pattern, it must take the place
// of at least one sub-pattern, so `pat_ids` should be smaller than the
// constructor arity.
if pat_ids.len() < constructor_arity {
let mut new_patterns: Vec<PatIdOrWild> = vec![];
for pat_id in &pat_ids[0..ellipsis_position] {
new_patterns.push((*pat_id).into());
}
for _ in 0..(constructor_arity - pat_ids.len()) {
new_patterns.push(PatIdOrWild::Wild);
}
for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] {
new_patterns.push((*pat_id).into());
}
Some(self.replace_head_with(&new_patterns))
} else {
return Err(MatchCheckErr::MalformedMatchArm);
}
} else {
// If there is no ellipsis in the tuple pattern, the number
// of patterns must equal the constructor arity.
if pat_ids.len() == constructor_arity {
Some(self.replace_head_with(pat_ids))
} else {
return Err(MatchCheckErr::MalformedMatchArm);
}
}
}
}
(Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
@@ -644,7 +674,11 @@ impl Constructor {
fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> {
let res = match pat.as_pat(cx) {
Pat::Wild => None,
Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }),
// FIXME: somehow create the Tuple constructor with the proper arity. If there is
// an ellipsis, the arity is not equal to the number of patterns.
Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => {
Some(Constructor::Tuple { arity: pats.len() })
}
Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
_ => return Err(MatchCheckErr::NotImplemented),
@@ -972,6 +1006,47 @@ mod tests {
check_no_diagnostic(content);
}
#[test]
fn tuple_of_bools_with_ellipsis_at_end_no_diagnostic() {
let content = r"
fn test_fn() {
match (false, true, false) {
(false, ..) => {},
(true, ..) => {},
}
}
";
check_no_diagnostic(content);
}
#[test]
fn tuple_of_bools_with_ellipsis_at_beginning_no_diagnostic() {
let content = r"
fn test_fn() {
match (false, true, false) {
(.., false) => {},
(.., true) => {},
}
}
";
check_no_diagnostic(content);
}
#[test]
fn tuple_of_bools_with_ellipsis_no_diagnostic() {
let content = r"
fn test_fn() {
match (false, true, false) {
(..) => {},
}
}
";
check_no_diagnostic(content);
}
#[test]
fn tuple_of_tuple_and_bools_no_arms() {
let content = r"
@@ -1315,8 +1390,9 @@ mod tests {
}
";
// Match arms with the incorrect type are filtered out.
check_diagnostic(content);
// Match statements with arms that don't match the
// expression pattern do not fire this diagnostic.
check_no_diagnostic(content);
}
#[test]
@@ -1330,8 +1406,9 @@ mod tests {
}
";
// Match arms with the incorrect type are filtered out.
check_diagnostic(content);
// Match statements with arms that don't match the
// expression pattern do not fire this diagnostic.
check_no_diagnostic(content);
}
#[test]
@@ -1344,8 +1421,9 @@ mod tests {
}
";
// Match arms with the incorrect type are filtered out.
check_diagnostic(content);
// Match statements with arms that don't match the
// expression pattern do not fire this diagnostic.
check_no_diagnostic(content);
}
#[test]
@@ -1383,6 +1461,163 @@ mod tests {
// we don't create a diagnostic).
check_no_diagnostic(content);
}
#[test]
fn expr_diverges() {
let content = r"
enum Either {
A,
B,
}
fn test_fn() {
match loop {} {
Either::A => (),
Either::B => (),
}
}
";
check_no_diagnostic(content);
}
#[test]
fn expr_loop_with_break() {
let content = r"
enum Either {
A,
B,
}
fn test_fn() {
match loop { break Foo::A } {
Either::A => (),
Either::B => (),
}
}
";
check_no_diagnostic(content);
}
#[test]
fn expr_partially_diverges() {
let content = r"
enum Either<T> {
A(T),
B,
}
fn foo() -> Either<!> {
Either::B
}
fn test_fn() -> u32 {
match foo() {
Either::A(val) => val,
Either::B => 0,
}
}
";
check_no_diagnostic(content);
}
#[test]
fn enum_tuple_partial_ellipsis_no_diagnostic() {
let content = r"
enum Either {
A(bool, bool, bool, bool),
B,
}
fn test_fn() {
match Either::B {
Either::A(true, .., true) => {},
Either::A(true, .., false) => {},
Either::A(false, .., true) => {},
Either::A(false, .., false) => {},
Either::B => {},
}
}
";
check_no_diagnostic(content);
}
#[test]
fn enum_tuple_partial_ellipsis_2_no_diagnostic() {
let content = r"
enum Either {
A(bool, bool, bool, bool),
B,
}
fn test_fn() {
match Either::B {
Either::A(true, .., true) => {},
Either::A(true, .., false) => {},
Either::A(.., true) => {},
Either::A(.., false) => {},
Either::B => {},
}
}
";
check_no_diagnostic(content);
}
#[test]
fn enum_tuple_partial_ellipsis_missing_arm() {
let content = r"
enum Either {
A(bool, bool, bool, bool),
B,
}
fn test_fn() {
match Either::B {
Either::A(true, .., true) => {},
Either::A(true, .., false) => {},
Either::A(false, .., false) => {},
Either::B => {},
}
}
";
check_diagnostic(content);
}
#[test]
fn enum_tuple_partial_ellipsis_2_missing_arm() {
let content = r"
enum Either {
A(bool, bool, bool, bool),
B,
}
fn test_fn() {
match Either::B {
Either::A(true, .., true) => {},
Either::A(true, .., false) => {},
Either::A(.., true) => {},
Either::B => {},
}
}
";
check_diagnostic(content);
}
#[test]
fn enum_tuple_ellipsis_no_diagnostic() {
let content = r"
enum Either {
A(bool, bool, bool, bool),
B,
}
fn test_fn() {
match Either::B {
Either::A(..) => {},
Either::B => {},
}
}
";
check_no_diagnostic(content);
}
}
#[cfg(test)]
@@ -1452,4 +1687,75 @@ mod false_negatives {
// We do not currently handle patterns with internal `or`s.
check_no_diagnostic(content);
}
#[test]
fn expr_diverges_missing_arm() {
let content = r"
enum Either {
A,
B,
}
fn test_fn() {
match loop {} {
Either::A => (),
}
}
";
// This is a false negative.
// Even though the match expression diverges, rustc fails
// to compile here since `Either::B` is missing.
check_no_diagnostic(content);
}
#[test]
fn expr_loop_missing_arm() {
let content = r"
enum Either {
A,
B,
}
fn test_fn() {
match loop { break Foo::A } {
Either::A => (),
}
}
";
// This is a false negative.
// We currently infer the type of `loop { break Foo::A }` to `!`, which
// causes us to skip the diagnostic since `Either::A` doesn't type check
// with `!`.
check_no_diagnostic(content);
}
#[test]
fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
let content = r"
fn test_fn() {
match (false, true, false) {
(false, ..) => {},
}
}
";
// This is a false negative.
// We don't currently handle tuple patterns with ellipsis.
check_no_diagnostic(content);
}
#[test]
fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
let content = r"
fn test_fn() {
match (false, true, false) {
(.., false) => {},
}
}
";
// This is a false negative.
// We don't currently handle tuple patterns with ellipsis.
check_no_diagnostic(content);
}
}
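The specialization logic above pads an ellipsis pattern out to the constructor's arity by inserting wildcards at the recorded `..` index, so `Either::A(true, .., false)` against an arity of 4 behaves like `A(true, _, _, false)`. A standalone sketch of that padding step (simplified; the real code works with `PatIdOrWild` and reports `MalformedMatchArm`, for which `None` stands in here):

    #[derive(Clone, Debug, PartialEq)]
    enum P { Pat(&'static str), Wild }

    // Pad a tuple(-struct) pattern out to the constructor's arity by inserting
    // wildcards at the recorded `..` position.
    fn expand_ellipsis(pats: &[P], ellipsis: Option<usize>, arity: usize) -> Option<Vec<P>> {
        match ellipsis {
            // The `..` must stand in for at least one sub-pattern here, mirroring
            // the `pat_ids.len() < constructor_arity` check above.
            Some(pos) if pats.len() < arity => {
                let mut out = pats[..pos].to_vec();
                out.extend(std::iter::repeat(P::Wild).take(arity - pats.len()));
                out.extend_from_slice(&pats[pos..]);
                Some(out)
            }
            None if pats.len() == arity => Some(pats.to_vec()),
            _ => None,
        }
    }

    fn main() {
        // `Either::A(true, .., false)` against arity 4 becomes `[true, _, _, false]`.
        let pats = [P::Pat("true"), P::Pat("false")];
        assert_eq!(
            expand_ellipsis(&pats, Some(1), 4),
            Some(vec![P::Pat("true"), P::Wild, P::Wild, P::Pat("false")])
        );
    }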

View file

@@ -161,12 +161,6 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let mut seen = Matrix::empty();
for pat in pats {
// We skip any patterns whose type we cannot resolve.
//
// This could lead to false positives in this diagnostic, so
// it might be better to skip the entire diagnostic if we either
// cannot resolve a match arm or determine that the match arm has
// the wrong type.
if let Some(pat_ty) = infer.type_of_pat.get(pat) {
// We only include patterns whose type matches the type
// of the match expression. If we had a InvalidMatchArmPattern
@@ -189,8 +183,15 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
// to the matrix here.
let v = PatStack::from_pattern(pat);
seen.push(&cx, v);
continue;
}
}
// If we can't resolve the type of a pattern, or the pattern type doesn't
// fit the match expression, we skip this diagnostic. Skipping the entire
// diagnostic rather than just not including this match arm is preferred
// to avoid the chance of false positives.
return;
}
match is_useful(&cx, &seen, &PatStack::from_wild()) {
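The comment above describes the new bail-out strategy: if any arm's pattern type is unresolved or doesn't match the scrutinee, the whole missing-arms diagnostic is skipped instead of silently dropping that arm. A minimal standalone model of that decision (types and names are illustrative, not the validator's real API):

    #[derive(PartialEq)]
    struct Ty(&'static str);

    // Decide whether the missing-arms check may run at all: bail out of the whole
    // diagnostic if any arm's pattern type is unresolved or differs from the
    // scrutinee type, mirroring the early `return` above.
    fn can_check_match(scrutinee: &Ty, arm_pat_types: &[Option<Ty>]) -> bool {
        arm_pat_types.iter().all(|ty| ty.as_ref() == Some(scrutinee))
    }

    fn main() {
        let bool_ty = Ty("bool");
        assert_eq!(bool_ty.0, "bool");
        assert!(can_check_match(&bool_ty, &[Some(Ty("bool")), Some(Ty("bool"))]));
        // A single unresolved (or mismatched) arm suppresses the whole diagnostic.
        assert!(!can_check_match(&bool_ty, &[Some(Ty("bool")), None]));
    }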

View file

@@ -85,7 +85,7 @@ impl<'a> InferenceContext<'a> {
let body = Arc::clone(&self.body); // avoid borrow checker problem
let is_non_ref_pat = match &body[pat] {
Pat::Tuple(..)
Pat::Tuple { .. }
| Pat::Or(..)
| Pat::TupleStruct { .. }
| Pat::Record { .. }
@@ -116,7 +116,7 @@ impl<'a> InferenceContext<'a> {
let expected = expected;
let ty = match &body[pat] {
Pat::Tuple(ref args) => {
Pat::Tuple { ref args, .. } => {
let expectations = match expected.as_tuple() {
Some(parameters) => &*parameters.0,
_ => &[],
@@ -155,7 +155,7 @@ impl<'a> InferenceContext<'a> {
let subty = self.infer_pat(*pat, expectation, default_bm);
Ty::apply_one(TypeCtor::Ref(*mutability), subty)
}
Pat::TupleStruct { path: p, args: subpats } => {
Pat::TupleStruct { path: p, args: subpats, .. } => {
self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
}
Pat::Record { path: p, args: fields, ellipsis: _ } => {

View file

@@ -1,7 +1,8 @@
use super::{infer, infer_with_mismatches};
use insta::assert_snapshot;
use test_utils::covers;
use super::{infer, infer_with_mismatches};
#[test]
fn infer_pattern() {
assert_snapshot!(

View file

@@ -208,9 +208,20 @@ mod tests {
}
}
fn call_info(text: &str) -> CallInfo {
fn call_info_helper(text: &str) -> Option<CallInfo> {
let (analysis, position) = single_file_with_position(text);
analysis.call_info(position).unwrap().unwrap()
analysis.call_info(position).unwrap()
}
fn call_info(text: &str) -> CallInfo {
let info = call_info_helper(text);
assert!(info.is_some());
info.unwrap()
}
fn no_call_info(text: &str) {
let info = call_info_helper(text);
assert!(info.is_none());
}
#[test]
@@ -558,9 +569,8 @@ fn main() {
}
#[test]
#[should_panic]
fn cant_call_named_structs() {
let _ = call_info(
no_call_info(
r#"
struct TS { x: u32, y: i32 }
fn main() {
@@ -594,9 +604,8 @@ fn main() {
}
#[test]
#[should_panic]
fn cant_call_enum_records() {
let _ = call_info(
no_call_info(
r#"
enum E {
/// A Variant

View file

@@ -7,6 +7,10 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_pat_binding_or_const {
return;
}
if ctx.record_pat_syntax.is_some() {
return;
}
// FIXME: ideally, we should look at the type we are matching against and
// suggest variants + auto-imports
ctx.scope().process_all_names(&mut |name, res| {

View file

@@ -2,7 +2,7 @@
use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
let missing_fields = match (ctx.record_pat_syntax.as_ref(), ctx.record_lit_syntax.as_ref()) {
(None, None) => return None,
(Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
(Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat),

View file

@@ -3,7 +3,14 @@
use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) {
if !(ctx.is_trivial_path && !ctx.is_pat_binding_or_const && !ctx.record_lit_syntax.is_some()) {
if !ctx.is_trivial_path {
return;
}
if ctx.is_pat_binding_or_const
|| ctx.record_lit_syntax.is_some()
|| ctx.record_pat_syntax.is_some()
{
return;
}

View file

@@ -31,7 +31,7 @@ pub(crate) struct CompletionContext<'a> {
pub(super) function_syntax: Option<ast::FnDef>,
pub(super) use_item_syntax: Option<ast::UseItem>,
pub(super) record_lit_syntax: Option<ast::RecordLit>,
pub(super) record_lit_pat: Option<ast::RecordPat>,
pub(super) record_pat_syntax: Option<ast::RecordPat>,
pub(super) impl_def: Option<ast::ImplDef>,
pub(super) call_info: Option<CallInfo>,
pub(super) is_param: bool,
@@ -97,7 +97,7 @@ impl<'a> CompletionContext<'a> {
call_info: None,
use_item_syntax: None,
record_lit_syntax: None,
record_lit_pat: None,
record_pat_syntax: None,
impl_def: None,
is_param: false,
is_pat_binding_or_const: false,
@@ -186,6 +186,11 @@ impl<'a> CompletionContext<'a> {
self.is_param = true;
return;
}
// FIXME: remove this (V) duplication and make the check more precise
if name_ref.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
self.record_pat_syntax =
self.sema.find_node_at_offset_with_macros(&original_file, offset);
}
self.classify_name_ref(original_file, name_ref, offset);
}
@@ -215,8 +220,9 @@ impl<'a> CompletionContext<'a> {
self.is_param = true;
return;
}
// FIXME: remove this (^) duplication and make the check more precise
if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
self.record_lit_pat =
self.record_pat_syntax =
self.sema.find_node_at_offset_with_macros(&original_file, offset);
}
}

View file

@@ -282,13 +282,10 @@ fn name_ref(p: &mut Parser) {
}
fn name_ref_or_index(p: &mut Parser) {
if p.at(IDENT) || p.at(INT_NUMBER) {
assert!(p.at(IDENT) || p.at(INT_NUMBER));
let m = p.start();
p.bump_any();
m.complete(p, NAME_REF);
} else {
p.err_and_bump("expected identifier");
}
}
fn error_block(p: &mut Parser, message: &str) {

View file

@@ -192,16 +192,32 @@ fn record_field_pat_list(p: &mut Parser) {
match p.current() {
// A trailing `..` is *not* treated as a DOT_DOT_PAT.
T![.] if p.at(T![..]) => p.bump(T![..]),
IDENT | INT_NUMBER if p.nth(1) == T![:] => record_field_pat(p),
T!['{'] => error_block(p, "expected ident"),
c => {
let m = p.start();
match c {
// test record_field_pat
// fn foo() {
// let S { 0: 1 } = ();
// let S { x: 1 } = ();
// }
IDENT | INT_NUMBER if p.nth(1) == T![:] => {
name_ref_or_index(p);
p.bump(T![:]);
pattern(p);
}
T![box] => {
// FIXME: not all box patterns should be allowed
box_pat(p);
}
_ => {
bind_pat(p, false);
}
}
m.complete(p, RECORD_FIELD_PAT);
}
}
if !p.at(T!['}']) {
p.expect(T![,]);
}
@@ -210,26 +226,6 @@ fn record_field_pat_list(p: &mut Parser) {
m.complete(p, RECORD_FIELD_PAT_LIST);
}
// test record_field_pat
// fn foo() {
// let S { 0: 1 } = ();
// let S { x: 1 } = ();
// }
fn record_field_pat(p: &mut Parser) {
assert!(p.at(IDENT) || p.at(INT_NUMBER));
assert!(p.nth(1) == T![:]);
let m = p.start();
if !p.eat(INT_NUMBER) {
name(p)
}
p.bump_any();
pattern(p);
m.complete(p, RECORD_FIELD_PAT);
}
// test placeholder_pat
// fn main() { let _ = (); }
fn placeholder_pat(p: &mut Parser) -> CompletedMarker {
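For reference, these are the record-field-pattern forms the rewritten `record_field_pat_list` accepts, each now wrapped in a single RECORD_FIELD_PAT node (box patterns are also accepted but need the unstable `box_patterns` feature, so they are only mentioned in a comment). The snippet is ordinary Rust showing the surface syntax being parsed:

    struct Named { x: u32, y: u32 }
    struct Tuple(u32);

    fn main() {
        let n = Named { x: 1, y: 2 };
        let m = Named { x: 3, y: 4 };
        let t = Tuple(7);

        // `name: pat` with an identifier key ...
        let Named { x: a, y: _ } = n;
        // ... shorthand and `ref` bindings, parsed via `bind_pat` ...
        let Named { x, ref y } = m;
        // ... and an integer key on a tuple struct (the `0: 1` test case above).
        // (`box pat` fields would also parse here, behind `box_patterns`.)
        let Tuple { 0: first } = t;

        assert_eq!((a, x, *y, first), (1, 3, 4, 7));
    }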

View file

@@ -18,8 +18,8 @@ use crate::{
pub use self::{
expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp},
extensions::{
AttrKind, FieldKind, PathSegmentKind, SelfParamKind, SlicePatComponents, StructKind,
TypeBoundKind, VisibilityKind,
AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
StructKind, TypeBoundKind, VisibilityKind,
},
generated::{nodes::*, tokens::*},
tokens::*,

View file

@@ -1,6 +1,8 @@
//! Various extension methods to ast Nodes, which are hard to code-generate.
//! Extensions for various expressions live in a sibling `expr_extensions` module.
use std::fmt;
use itertools::Itertools;
use ra_parser::SyntaxKind;
@@ -217,6 +219,34 @@ impl ast::RecordField {
}
}
pub enum NameOrNameRef {
Name(ast::Name),
NameRef(ast::NameRef),
}
impl fmt::Display for NameOrNameRef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
NameOrNameRef::Name(it) => fmt::Display::fmt(it, f),
NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f),
}
}
}
impl ast::RecordFieldPat {
/// Deals with field init shorthand
pub fn field_name(&self) -> Option<NameOrNameRef> {
if let Some(name_ref) = self.name_ref() {
return Some(NameOrNameRef::NameRef(name_ref));
}
if let Some(ast::Pat::BindPat(pat)) = self.pat() {
let name = pat.name()?;
return Some(NameOrNameRef::Name(name));
}
None
}
}
impl ast::EnumVariant {
pub fn parent_enum(&self) -> ast::EnumDef {
self.syntax()
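`RecordFieldPat::field_name` above prefers the explicit `NameRef` of an `x: pat` field and falls back to the binding's `Name` for the shorthand `S { x }`. A standalone model of that fallback (plain strings instead of syntax nodes; `FieldPat` and this `field_name` are illustrative):

    enum NameOrNameRef { Name(String), NameRef(String) }

    struct FieldPat {
        name_ref: Option<String>,  // present for the `x: pat` form
        bind_name: Option<String>, // present for the shorthand `x` / `ref x`
    }

    // Prefer the explicit NameRef, fall back to the shorthand binding's Name.
    fn field_name(f: &FieldPat) -> Option<NameOrNameRef> {
        if let Some(name_ref) = &f.name_ref {
            return Some(NameOrNameRef::NameRef(name_ref.clone()));
        }
        f.bind_name.clone().map(NameOrNameRef::Name)
    }

    fn main() {
        let explicit = FieldPat { name_ref: Some("x".into()), bind_name: None };
        let shorthand = FieldPat { name_ref: None, bind_name: Some("y".into()) };
        assert!(matches!(field_name(&explicit), Some(NameOrNameRef::NameRef(n)) if n == "x"));
        assert!(matches!(field_name(&shorthand), Some(NameOrNameRef::Name(n)) if n == "y"));
    }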

View file

@@ -1806,8 +1806,8 @@ impl AstNode for RecordFieldPat {
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl ast::AttrsOwner for RecordFieldPat {}
impl ast::NameOwner for RecordFieldPat {}
impl RecordFieldPat {
pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
}

View file

@@ -44,11 +44,13 @@ SOURCE_FILE@[0; 119)
RECORD_FIELD_PAT_LIST@[40; 56)
L_CURLY@[40; 41) "{"
WHITESPACE@[41; 42) " "
RECORD_FIELD_PAT@[42; 43)
BIND_PAT@[42; 43)
NAME@[42; 43)
IDENT@[42; 43) "f"
COMMA@[43; 44) ","
WHITESPACE@[44; 45) " "
RECORD_FIELD_PAT@[45; 54)
BIND_PAT@[45; 54)
REF_KW@[45; 48) "ref"
WHITESPACE@[48; 49) " "
@@ -79,7 +81,7 @@ SOURCE_FILE@[0; 119)
L_CURLY@[73; 74) "{"
WHITESPACE@[74; 75) " "
RECORD_FIELD_PAT@[75; 79)
NAME@[75; 76)
NAME_REF@[75; 76)
IDENT@[75; 76) "h"
COLON@[76; 77) ":"
WHITESPACE@[77; 78) " "
@@ -110,7 +112,7 @@ SOURCE_FILE@[0; 119)
L_CURLY@[101; 102) "{"
WHITESPACE@[102; 103) " "
RECORD_FIELD_PAT@[103; 107)
NAME@[103; 104)
NAME_REF@[103; 104)
IDENT@[103; 104) "h"
COLON@[104; 105) ":"
WHITESPACE@[105; 106) " "

View file

@@ -44,6 +44,7 @@ SOURCE_FILE@[0; 118)
RECORD_FIELD_PAT_LIST@[50; 81)
L_CURLY@[50; 51) "{"
WHITESPACE@[51; 52) " "
RECORD_FIELD_PAT@[52; 57)
BOX_PAT@[52; 57)
BOX_KW@[52; 55) "box"
WHITESPACE@[55; 56) " "
@@ -53,7 +54,7 @@ SOURCE_FILE@[0; 118)
COMMA@[57; 58) ","
WHITESPACE@[58; 59) " "
RECORD_FIELD_PAT@[59; 79)
NAME@[59; 60)
NAME_REF@[59; 60)
IDENT@[59; 60) "j"
COLON@[60; 61) ":"
WHITESPACE@[61; 62) " "

View file

@@ -25,6 +25,7 @@ SOURCE_FILE@[0; 63)
L_CURLY@[21; 22) "{"
WHITESPACE@[22; 23) " "
RECORD_FIELD_PAT@[23; 27)
NAME_REF@[23; 24)
INT_NUMBER@[23; 24) "0"
COLON@[24; 25) ":"
WHITESPACE@[25; 26) " "
@@ -54,7 +55,7 @@ SOURCE_FILE@[0; 63)
L_CURLY@[46; 47) "{"
WHITESPACE@[47; 48) " "
RECORD_FIELD_PAT@[48; 52)
NAME@[48; 49)
NAME_REF@[48; 49)
IDENT@[48; 49) "x"
COLON@[49; 50) ":"
WHITESPACE@[50; 51) " "

View file

@@ -68,11 +68,13 @@ SOURCE_FILE@[0; 170)
RECORD_FIELD_PAT_LIST@[59; 67)
L_CURLY@[59; 60) "{"
WHITESPACE@[60; 61) " "
RECORD_FIELD_PAT@[61; 62)
BIND_PAT@[61; 62)
NAME@[61; 62)
IDENT@[61; 62) "a"
COMMA@[62; 63) ","
WHITESPACE@[63; 64) " "
RECORD_FIELD_PAT@[64; 65)
BIND_PAT@[64; 65)
NAME@[64; 65)
IDENT@[64; 65) "b"

View file

@@ -71,11 +71,13 @@ SOURCE_FILE@[0; 137)
RECORD_FIELD_PAT_LIST@[58; 66)
L_CURLY@[58; 59) "{"
WHITESPACE@[59; 60) " "
RECORD_FIELD_PAT@[60; 61)
BIND_PAT@[60; 61)
NAME@[60; 61)
IDENT@[60; 61) "a"
COMMA@[61; 62) ","
WHITESPACE@[62; 63) " "
RECORD_FIELD_PAT@[63; 64)
BIND_PAT@[63; 64)
NAME@[63; 64)
IDENT@[63; 64) "b"

View file

@@ -511,7 +511,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
T![..],
T!['}']
}
struct RecordFieldPat: AttrsOwner, NameOwner { T![:], Pat }
struct RecordFieldPat: AttrsOwner { NameRef, T![:], Pat }
struct TupleStructPat { Path, T!['('], args: [Pat], T![')'] }
struct TuplePat { T!['('], args: [Pat], T![')'] }