rust-analyzer (mirror of https://github.com/rust-lang/rust-analyzer.git)

commit 585bb83e2a
feat: add attributes support on struct fields and method #3870
Signed-off-by: Benjamin Coenen <5719034+bnjjj@users.noreply.github.com>

28 changed files with 522 additions and 336 deletions
.github/workflows/release.yaml (vendored, 4 changes)

@@ -50,11 +50,11 @@ jobs:
     - name: Dist
       if: matrix.os == 'ubuntu-latest' && github.ref == 'refs/heads/release'
-      run: cargo xtask dist --client --version 0.2.$GITHUB_RUN_NUMBER --tag $(date --iso --utc)
+      run: cargo xtask dist --client 0.2.$GITHUB_RUN_NUMBER

     - name: Dist
       if: matrix.os == 'ubuntu-latest' && github.ref != 'refs/heads/release'
-      run: cargo xtask dist --client --version 0.3.$GITHUB_RUN_NUMBER-nightly --tag nightly
+      run: cargo xtask dist --nightly --client 0.3.$GITHUB_RUN_NUMBER-nightly

     - name: Dist
       if: matrix.os != 'ubuntu-latest'

@@ -1027,8 +1027,16 @@ impl Type {
         ty: Ty,
     ) -> Option<Type> {
         let krate = resolver.krate()?;
+        Some(Type::new_with_resolver_inner(db, krate, resolver, ty))
+    }
+    pub(crate) fn new_with_resolver_inner(
+        db: &dyn HirDatabase,
+        krate: CrateId,
+        resolver: &Resolver,
+        ty: Ty,
+    ) -> Type {
         let environment = TraitEnvironment::lower(db, &resolver);
-        Some(Type { krate, ty: InEnvironment { value: ty, environment } })
+        Type { krate, ty: InEnvironment { value: ty, environment } }
     }

     fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
@@ -1152,27 +1160,6 @@ impl Type {
         res
     }

-    pub fn variant_fields(
-        &self,
-        db: &dyn HirDatabase,
-        def: VariantDef,
-    ) -> Vec<(StructField, Type)> {
-        // FIXME: check that ty and def match
-        match &self.ty.value {
-            Ty::Apply(a_ty) => {
-                let field_types = db.field_types(def.into());
-                def.fields(db)
-                    .into_iter()
-                    .map(|it| {
-                        let ty = field_types[it.id].clone().subst(&a_ty.parameters);
-                        (it, self.derived(ty))
-                    })
-                    .collect()
-            }
-            _ => Vec::new(),
-        }
-    }
-
     pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
         // There should be no inference vars in types passed here
         // FIXME check that?
@@ -23,7 +23,7 @@ use crate::{
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
     AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
-    Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
+    Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam,
 };

 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -187,14 +187,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze(field.syntax()).resolve_record_field(self.db, field)
     }

-    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
-        self.analyze(record_lit.syntax()).resolve_record_literal(self.db, record_lit)
-    }
-
-    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
-        self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
-    }
-
     pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
         let sa = self.analyze(macro_call.syntax());
         let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
@@ -212,6 +204,24 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     // FIXME: use this instead?
     // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;

+    pub fn record_literal_missing_fields(
+        &self,
+        literal: &ast::RecordLit,
+    ) -> Vec<(StructField, Type)> {
+        self.analyze(literal.syntax())
+            .record_literal_missing_fields(self.db, literal)
+            .unwrap_or_default()
+    }
+
+    pub fn record_pattern_missing_fields(
+        &self,
+        pattern: &ast::RecordPat,
+    ) -> Vec<(StructField, Type)> {
+        self.analyze(pattern.syntax())
+            .record_pattern_missing_fields(self.db, pattern)
+            .unwrap_or_default()
+    }
+
     pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
         let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
         T::to_def(self, src)
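
The two additions above give IDE layers a single entry point for asking "which fields of this record are still missing?". A minimal sketch of a caller — the function, its printing, and the generic `DB: HirDatabase` bound are illustrative assumptions, not part of this commit:

    fn report_missing_fields<DB: HirDatabase>(sema: &Semantics<DB>, lit: &ast::RecordLit) {
        // Each entry pairs the missing field with its type, already
        // substituted for this particular literal.
        for (field, _ty) in sema.record_literal_missing_fields(lit) {
            eprintln!("missing field: {}", field.name(sema.db));
        }
    }
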
@@ -14,10 +14,13 @@ use hir_def::{
     },
     expr::{ExprId, Pat, PatId},
     resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
-    AsMacroCall, DefWithBodyId,
+    AsMacroCall, DefWithBodyId, LocalStructFieldId, StructFieldId, VariantId,
 };
 use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
-use hir_ty::InferenceResult;
+use hir_ty::{
+    expr::{record_literal_missing_fields, record_pattern_missing_fields},
+    InferenceResult, Substs, Ty,
+};
 use ra_syntax::{
     ast::{self, AstNode},
     SyntaxNode, SyntaxNodePtr, TextUnit,
@@ -25,8 +28,10 @@ use ra_syntax::{

 use crate::{
     db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef,
-    ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    ModPath, ModuleDef, Path, PathKind, Static, Struct, StructField, Trait, Type, TypeAlias,
+    TypeParam,
 };
+use ra_db::CrateId;

 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
 /// original source files. It should not be used inside the HIR itself.
@@ -164,23 +169,6 @@ impl SourceAnalyzer {
         Some((struct_field.into(), local))
     }

-    pub(crate) fn resolve_record_literal(
-        &self,
-        db: &dyn HirDatabase,
-        record_lit: &ast::RecordLit,
-    ) -> Option<crate::VariantDef> {
-        let expr_id = self.expr_id(db, &record_lit.clone().into())?;
-        self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
-    }
-
-    pub(crate) fn resolve_record_pattern(
-        &self,
-        record_pat: &ast::RecordPat,
-    ) -> Option<crate::VariantDef> {
-        let pat_id = self.pat_id(&record_pat.clone().into())?;
-        self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
-    }
-
     pub(crate) fn resolve_macro_call(
         &self,
         db: &dyn HirDatabase,
@@ -231,6 +219,68 @@ impl SourceAnalyzer {
         resolve_hir_path(db, &self.resolver, &hir_path)
     }

+    pub(crate) fn record_literal_missing_fields(
+        &self,
+        db: &dyn HirDatabase,
+        literal: &ast::RecordLit,
+    ) -> Option<Vec<(StructField, Type)>> {
+        let krate = self.resolver.krate()?;
+        let body = self.body.as_ref()?;
+        let infer = self.infer.as_ref()?;
+
+        let expr_id = self.expr_id(db, &literal.clone().into())?;
+        let substs = match &infer.type_of_expr[expr_id] {
+            Ty::Apply(a_ty) => &a_ty.parameters,
+            _ => return None,
+        };
+
+        let (variant, missing_fields, _exhaustive) =
+            record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
+        Some(res)
+    }
+
+    pub(crate) fn record_pattern_missing_fields(
+        &self,
+        db: &dyn HirDatabase,
+        pattern: &ast::RecordPat,
+    ) -> Option<Vec<(StructField, Type)>> {
+        let krate = self.resolver.krate()?;
+        let body = self.body.as_ref()?;
+        let infer = self.infer.as_ref()?;
+
+        let pat_id = self.pat_id(&pattern.clone().into())?;
+        let substs = match &infer.type_of_pat[pat_id] {
+            Ty::Apply(a_ty) => &a_ty.parameters,
+            _ => return None,
+        };
+
+        let (variant, missing_fields) =
+            record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
+        Some(res)
+    }
+
+    fn missing_fields(
+        &self,
+        db: &dyn HirDatabase,
+        krate: CrateId,
+        substs: &Substs,
+        variant: VariantId,
+        missing_fields: Vec<LocalStructFieldId>,
+    ) -> Vec<(StructField, Type)> {
+        let field_types = db.field_types(variant);
+
+        missing_fields
+            .into_iter()
+            .map(|local_id| {
+                let field = StructFieldId { parent: variant, local_id };
+                let ty = field_types[local_id].clone().subst(substs);
+                (field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty))
+            })
+            .collect()
+    }
+
     pub(crate) fn expand(
         &self,
         db: &dyn HirDatabase,
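
Note the `subst(substs)` step in `missing_fields` above: field types are stored generically on the variant definition and must be specialized to the literal or pattern at hand. A worked example, with hypothetical names and the field id shown by name for readability:

    // Given:   struct Wrap<T> { value: T }
    // Literal: Wrap::<u32> { }              // substs = [u32]
    //
    // field_types[value] is the generic `T`; after `.subst(&substs)` the
    // reported missing field is `value: u32`, which is what completion
    // and diagnostics should surface.
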
@@ -235,7 +235,10 @@ impl From<PatId> for PatIdOrWild {
 }

 #[derive(Debug, Clone, Copy, PartialEq)]
-pub struct MatchCheckNotImplemented;
+pub enum MatchCheckErr {
+    NotImplemented,
+    MalformedMatchArm,
+}

 /// The return type of `is_useful` is either an indication of usefulness
 /// of the match arm, or an error in the case the match statement
@@ -244,7 +247,7 @@ pub struct MatchCheckNotImplemented;
 ///
 /// The `std::result::Result` type is used here rather than a custom enum
 /// to allow the use of `?`.
-pub type MatchCheckResult<T> = Result<T, MatchCheckNotImplemented>;
+pub type MatchCheckResult<T> = Result<T, MatchCheckErr>;

 #[derive(Debug)]
 /// A row in a Matrix.
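
As the doc comment says, keeping a plain `Result` is what lets the checker bail out with `?`; widening the error from a unit struct to an enum preserves that while distinguishing the two failure modes. A toy illustration of the pattern (both functions are hypothetical):

    fn step() -> MatchCheckResult<u32> {
        Err(MatchCheckErr::NotImplemented)
    }

    fn driver() -> MatchCheckResult<u32> {
        let n = step()?; // `?` forwards either error variant unchanged
        Ok(n + 1)
    }
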
@@ -335,12 +338,12 @@ impl PatStack {
                     Expr::Literal(Literal::Bool(_)) => None,
                     // perhaps this is actually unreachable given we have
                     // already checked that these match arms have the appropriate type?
-                    _ => return Err(MatchCheckNotImplemented),
+                    _ => return Err(MatchCheckErr::NotImplemented),
                 }
             }
             (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?),
             (Pat::Path(_), Constructor::Enum(constructor)) => {
-                // enums with no associated data become `Pat::Path`
+                // unit enum variants become `Pat::Path`
                 let pat_id = self.head().as_id().expect("we know this isn't a wild");
                 if !enum_variant_matches(cx, pat_id, *constructor) {
                     None
@@ -348,16 +351,23 @@ impl PatStack {
                     Some(self.to_tail())
                 }
             }
-            (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(constructor)) => {
+            (Pat::TupleStruct { args: ref pat_ids, .. }, Constructor::Enum(enum_constructor)) => {
                 let pat_id = self.head().as_id().expect("we know this isn't a wild");
-                if !enum_variant_matches(cx, pat_id, *constructor) {
+                if !enum_variant_matches(cx, pat_id, *enum_constructor) {
                     None
                 } else {
+                    // If the enum variant matches, then we need to confirm
+                    // that the number of patterns aligns with the expected
+                    // number of patterns for that enum variant.
+                    if pat_ids.len() != constructor.arity(cx)? {
+                        return Err(MatchCheckErr::MalformedMatchArm);
+                    }
+
                     Some(self.replace_head_with(pat_ids))
                 }
             }
-            (Pat::Or(_), _) => return Err(MatchCheckNotImplemented),
-            (_, _) => return Err(MatchCheckNotImplemented),
+            (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
+            (_, _) => return Err(MatchCheckErr::NotImplemented),
         };

         Ok(result)
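
The new guard rejects arms whose tuple pattern has the wrong number of sub-patterns, such as `Either::B()` for a one-field variant; the tests added further down exercise exactly that case and verify the checker no longer panics. Stripped of the surrounding machinery, the check amounts to this hypothetical standalone form:

    fn check_pattern_arity(found: usize, expected: usize) -> MatchCheckResult<()> {
        if found != expected {
            // such a mismatch could previously slip through and panic later
            return Err(MatchCheckErr::MalformedMatchArm);
        }
        Ok(())
    }
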
@@ -514,7 +524,7 @@ pub(crate) fn is_useful(
         return if any_useful {
             Ok(Usefulness::Useful)
         } else if found_unimplemented {
-            Err(MatchCheckNotImplemented)
+            Err(MatchCheckErr::NotImplemented)
         } else {
            Ok(Usefulness::NotUseful)
         };
@@ -567,7 +577,7 @@ pub(crate) fn is_useful(
     }

     if found_unimplemented {
-        Err(MatchCheckNotImplemented)
+        Err(MatchCheckErr::NotImplemented)
     } else {
         Ok(Usefulness::NotUseful)
     }
@@ -604,7 +614,7 @@ impl Constructor {
             match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
                 VariantData::Tuple(struct_field_data) => struct_field_data.len(),
                 VariantData::Unit => 0,
-                _ => return Err(MatchCheckNotImplemented),
+                _ => return Err(MatchCheckErr::NotImplemented),
             }
         }
     };
@@ -637,20 +647,20 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt
         Pat::Tuple(pats) => Some(Constructor::Tuple { arity: pats.len() }),
         Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
             Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
-            _ => return Err(MatchCheckNotImplemented),
+            _ => return Err(MatchCheckErr::NotImplemented),
         },
         Pat::TupleStruct { .. } | Pat::Path(_) => {
             let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
             let variant_id =
-                cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckNotImplemented)?;
+                cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?;
             match variant_id {
                 VariantId::EnumVariantId(enum_variant_id) => {
                     Some(Constructor::Enum(enum_variant_id))
                 }
-                _ => return Err(MatchCheckNotImplemented),
+                _ => return Err(MatchCheckErr::NotImplemented),
             }
         }
-        _ => return Err(MatchCheckNotImplemented),
+        _ => return Err(MatchCheckErr::NotImplemented),
     };

     Ok(res)
@@ -1324,6 +1334,40 @@ mod tests {
         check_diagnostic(content);
     }

+    #[test]
+    fn malformed_match_arm_tuple_missing_pattern() {
+        let content = r"
+            fn test_fn() {
+                match (0) {
+                    () => (),
+                }
+            }
+        ";
+
+        // Match arms with the incorrect type are filtered out.
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn malformed_match_arm_tuple_enum_missing_pattern() {
+        let content = r"
+            enum Either {
+                A,
+                B(u32),
+            }
+            fn test_fn() {
+                match Either::A {
+                    Either::A => (),
+                    Either::B() => (),
+                }
+            }
+        ";
+
+        // We are testing to be sure we don't panic here when the match
+        // arm `Either::B` is missing its pattern.
+        check_no_diagnostic(content);
+    }
+
     #[test]
     fn enum_not_in_scope() {
         let content = r"
@@ -2,12 +2,8 @@

 use std::sync::Arc;

-use hir_def::{
-    path::{path, Path},
-    resolver::HasResolver,
-    AdtId, FunctionId,
-};
-use hir_expand::{diagnostics::DiagnosticSink, name::Name};
+use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId};
+use hir_expand::diagnostics::DiagnosticSink;
 use ra_syntax::{ast, AstPtr};
 use rustc_hash::FxHashSet;

@@ -28,7 +24,7 @@ pub use hir_def::{
         ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
         MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
     },
-    VariantId,
+    LocalStructFieldId, VariantId,
 };

 pub struct ExprValidator<'a, 'b: 'a> {
@@ -49,14 +45,37 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
     pub fn validate_body(&mut self, db: &dyn HirDatabase) {
         let body = db.body(self.func.into());

-        for e in body.exprs.iter() {
-            if let (id, Expr::RecordLit { path, fields, spread }) = e {
-                self.validate_record_literal(id, path, fields, *spread, db);
-            } else if let (id, Expr::Match { expr, arms }) = e {
+        for (id, expr) in body.exprs.iter() {
+            if let Some((variant_def, missed_fields, true)) =
+                record_literal_missing_fields(db, &self.infer, id, expr)
+            {
+                // XXX: only look at source_map if we do have missing fields
+                let (_, source_map) = db.body_with_source_map(self.func.into());
+
+                if let Ok(source_ptr) = source_map.expr_syntax(id) {
+                    if let Some(expr) = source_ptr.value.left() {
+                        let root = source_ptr.file_syntax(db.upcast());
+                        if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
+                            if let Some(field_list) = record_lit.record_field_list() {
+                                let variant_data = variant_data(db.upcast(), variant_def);
+                                let missed_fields = missed_fields
+                                    .into_iter()
+                                    .map(|idx| variant_data.fields()[idx].name.clone())
+                                    .collect();
+                                self.sink.push(MissingFields {
+                                    file: source_ptr.file_id,
+                                    field_list: AstPtr::new(&field_list),
+                                    missed_fields,
+                                })
+                            }
+                        }
+                    }
+                }
+            }
+            if let Expr::Match { expr, arms } = expr {
                 self.validate_match(id, *expr, arms, db, self.infer.clone());
             }
         }

         let body_expr = &body[body.body_expr];
         if let Expr::Block { tail: Some(t), .. } = body_expr {
             self.validate_results_in_tail_expr(body.body_expr, *t, db);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn validate_record_literal(
|
|
||||||
&mut self,
|
|
||||||
id: ExprId,
|
|
||||||
_path: &Option<Path>,
|
|
||||||
fields: &[RecordLitField],
|
|
||||||
spread: Option<ExprId>,
|
|
||||||
db: &dyn HirDatabase,
|
|
||||||
) {
|
|
||||||
if spread.is_some() {
|
|
||||||
return;
|
|
||||||
};
|
|
||||||
let variant_def: VariantId = match self.infer.variant_resolution_for_expr(id) {
|
|
||||||
Some(VariantId::UnionId(_)) | None => return,
|
|
||||||
Some(it) => it,
|
|
||||||
};
|
|
||||||
if let VariantId::UnionId(_) = variant_def {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let variant_data = variant_data(db.upcast(), variant_def);
|
|
||||||
|
|
||||||
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
|
|
||||||
let missed_fields: Vec<Name> = variant_data
|
|
||||||
.fields()
|
|
||||||
.iter()
|
|
||||||
.filter_map(|(_f, d)| {
|
|
||||||
let name = d.name.clone();
|
|
||||||
if lit_fields.contains(&name) {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(name)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
if missed_fields.is_empty() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let (_, source_map) = db.body_with_source_map(self.func.into());
|
|
||||||
|
|
||||||
if let Ok(source_ptr) = source_map.expr_syntax(id) {
|
|
||||||
if let Some(expr) = source_ptr.value.left() {
|
|
||||||
let root = source_ptr.file_syntax(db.upcast());
|
|
||||||
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
|
|
||||||
if let Some(field_list) = record_lit.record_field_list() {
|
|
||||||
self.sink.push(MissingFields {
|
|
||||||
file: source_ptr.file_id,
|
|
||||||
field_list: AstPtr::new(&field_list),
|
|
||||||
missed_fields,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
|
fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
|
||||||
// the mismatch will be on the whole block currently
|
// the mismatch will be on the whole block currently
|
||||||
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
|
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
|
||||||
|
@@ -232,3 +196,63 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         }
     }
 }
+
+pub fn record_literal_missing_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: ExprId,
+    expr: &Expr,
+) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> {
+    let (fields, exhausitve) = match expr {
+        Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_expr(id)?;
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_data(db.upcast(), variant_def);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    let missed_fields: Vec<LocalStructFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .collect();
+    if missed_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, missed_fields, exhausitve))
+}
+
+pub fn record_pattern_missing_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: PatId,
+    pat: &Pat,
+) -> Option<(VariantId, Vec<LocalStructFieldId>)> {
+    let fields = match pat {
+        Pat::Record { path: _, args } => args,
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_pat(id)?;
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_data(db.upcast(), variant_def);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    let missed_fields: Vec<LocalStructFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .collect();
+    if missed_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, missed_fields))
+}
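
The boolean in the literal helper's return value separates exhaustive literals from ones with a functional-update spread (diagnostics only fire on exhaustive literals, while completion wants missing fields either way). A worked summary of the contract, with field ids shown by name for readability:

    // Given: struct S { a: u32, b: u32 }
    //
    //   S { a: 0 }          -> Some((S, [b], true))    // `b` missing, exhaustive
    //   S { a: 0, ..rest }  -> Some((S, [b], false))   // spread: not exhaustive
    //   S { a: 0, b: 1 }    -> None                    // nothing missing
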
@@ -1,65 +1,24 @@
 //! Complete fields in record literals and patterns.
-use ra_syntax::{ast, ast::NameOwner, SmolStr};
-
 use crate::completion::{CompletionContext, Completions};

 pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
-    let (ty, variant, already_present_fields) =
-        match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
-            (None, None) => return None,
-            (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
-            (Some(record_pat), _) => (
-                ctx.sema.type_of_pat(&record_pat.clone().into())?,
-                ctx.sema.resolve_record_pattern(record_pat)?,
-                pattern_ascribed_fields(record_pat),
-            ),
-            (_, Some(record_lit)) => (
-                ctx.sema.type_of_expr(&record_lit.clone().into())?,
-                ctx.sema.resolve_record_literal(record_lit)?,
-                literal_ascribed_fields(record_lit),
-            ),
-        };
+    let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
+        (None, None) => return None,
+        (Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
+        (Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat),
+        (_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit),
+    };

-    for (field, field_ty) in ty.variant_fields(ctx.db, variant).into_iter().filter(|(field, _)| {
-        // FIXME: already_present_names better be `Vec<hir::Name>`
-        !already_present_fields.contains(&SmolStr::from(field.name(ctx.db).to_string()))
-    }) {
-        acc.add_field(ctx, field, &field_ty);
+    for (field, ty) in missing_fields {
+        acc.add_field(ctx, field, &ty)
     }

     Some(())
 }

-fn literal_ascribed_fields(record_lit: &ast::RecordLit) -> Vec<SmolStr> {
-    record_lit
-        .record_field_list()
-        .map(|field_list| field_list.fields())
-        .map(|fields| {
-            fields
-                .into_iter()
-                .filter_map(|field| field.name_ref())
-                .map(|name_ref| name_ref.text().clone())
-                .collect()
-        })
-        .unwrap_or_default()
-}
-
-fn pattern_ascribed_fields(record_pat: &ast::RecordPat) -> Vec<SmolStr> {
-    record_pat
-        .record_field_pat_list()
-        .map(|pat_list| {
-            pat_list
-                .record_field_pats()
-                .filter_map(|fild_pat| fild_pat.name())
-                .chain(pat_list.bind_pats().filter_map(|bind_pat| bind_pat.name()))
-                .map(|name| name.text().clone())
-                .collect()
-        })
-        .unwrap_or_default()
-}
-
 #[cfg(test)]
 mod tests {
-    mod record_lit_tests {
+    mod record_pat_tests {
         use insta::assert_debug_snapshot;

         use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
@@ -205,7 +164,7 @@ mod tests {
         }
     }

-    mod record_pat_tests {
+    mod record_lit_tests {
         use insta::assert_debug_snapshot;

         use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
@@ -410,5 +369,38 @@ mod tests {
             ]
             "###);
         }
+
+        #[test]
+        fn completes_functional_update() {
+            let completions = complete(
+                r"
+                struct S {
+                    foo1: u32,
+                    foo2: u32,
+                }
+
+                fn main() {
+                    let foo1 = 1;
+                    let s = S {
+                        foo1,
+                        <|>
+                        .. loop {}
+                    }
+                }
+                ",
+            );
+            assert_debug_snapshot!(completions, @r###"
+            [
+                CompletionItem {
+                    label: "foo2",
+                    source_range: [221; 221),
+                    delete: [221; 221),
+                    insert: "foo2",
+                    kind: Field,
+                    detail: "u32",
+                },
+            ]
+            "###);
+        }
     }
 }
crates/ra_ide/src/snapshots/highlight_injection.html (new file, 39 lines)

@@ -0,0 +1,39 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.macro { color: #94BFF3; }
+.module { color: #AFD8AF; }
+.variable { color: #DCDCCC; }
+.mutable { text-decoration: underline; }
+
+.keyword { color: #F0DFAF; font-weight: bold; }
+.keyword.unsafe { color: #BC8383; font-weight: bold; }
+.control { font-style: italic; }
+</style>
+<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span>(<span class="variable declaration">ra_fixture</span>: &<span class="builtin_type">str</span>) {}
+
+<span class="keyword">fn</span> <span class="function declaration">main</span>() {
+    <span class="function">fixture</span>(<span class="string_literal">r#"</span>
+    <span class="keyword">trait</span> <span class="trait declaration">Foo</span> {
+        <span class="keyword">fn</span> <span class="function declaration">foo</span>() {
+            <span class="macro">println!</span>(<span class="string_literal">"2 + 2 = {}"</span>, <span class="numeric_literal">4</span>);
+        }
+    }<span class="string_literal">"#</span>
+    );
+}</code></pre>
@@ -26,7 +26,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 .keyword.unsafe { color: #BC8383; font-weight: bold; }
 .control { font-style: italic; }
 </style>
-<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span>
+<pre><code><span class="attribute">#[derive(Clone, Debug)]</span>
 <span class="keyword">struct</span> <span class="struct declaration">Foo</span> {
     <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>,
     <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>,
@@ -36,11 +36,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
     <span class="function">foo</span>::<<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>>()
 }

-<span class="macro">macro_rules</span><span class="macro">!</span> def_fn {
+<span class="macro">macro_rules!</span> def_fn {
     ($($tt:tt)*) => {$($tt)*}
 }

-<span class="macro">def_fn</span><span class="macro">!</span> {
+<span class="macro">def_fn!</span> {
     <span class="keyword">fn</span> <span class="function declaration">bar</span>() -> <span class="builtin_type">u32</span> {
         <span class="numeric_literal">100</span>
     }
@@ -48,7 +48,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd

 <span class="comment">// comment</span>
 <span class="keyword">fn</span> <span class="function declaration">main</span>() {
-    <span class="macro">println</span><span class="macro">!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);
+    <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);

     <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new();
     <span class="keyword control">if</span> <span class="keyword">true</span> {
|
||||||
<span class="keyword">impl</span><<span class="type_param declaration">T</span>> <span class="enum">Option</span><<span class="type_param">T</span>> {
|
<span class="keyword">impl</span><<span class="type_param declaration">T</span>> <span class="enum">Option</span><<span class="type_param">T</span>> {
|
||||||
<span class="keyword">fn</span> <span class="function declaration">and</span><<span class="type_param declaration">U</span>>(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span><<span class="type_param">U</span>>) -> <span class="enum">Option</span><(<span class="type_param">T</span>, <span class="type_param">U</span>)> {
|
<span class="keyword">fn</span> <span class="function declaration">and</span><<span class="type_param declaration">U</span>>(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span><<span class="type_param">U</span>>) -> <span class="enum">Option</span><(<span class="type_param">T</span>, <span class="type_param">U</span>)> {
|
||||||
<span class="keyword control">match</span> <span class="variable">other</span> {
|
<span class="keyword control">match</span> <span class="variable">other</span> {
|
||||||
<span class="enum_variant">None</span> => <span class="macro">unimplemented</span><span class="macro">!</span>(),
|
<span class="enum_variant">None</span> => <span class="macro">unimplemented!</span>(),
|
||||||
<span class="variable declaration">Nope</span> => <span class="variable">Nope</span>,
|
<span class="variable declaration">Nope</span> => <span class="variable">Nope</span>,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -24,7 +24,7 @@ use crate::{call_info::call_info_for_token, Analysis, FileId};
 pub(crate) use html::highlight_as_html;
 pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag};

-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct HighlightedRange {
     pub range: TextRange,
     pub highlight: Highlight,
@@ -55,13 +55,55 @@ pub(crate) fn highlight(
     };

     let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
-    let mut res = Vec::new();
+    // We use a stack for the DFS traversal below.
+    // When we leave a node, the we use it to flatten the highlighted ranges.
+    let mut res: Vec<Vec<HighlightedRange>> = vec![Vec::new()];

     let mut current_macro_call: Option<ast::MacroCall> = None;

     // Walk all nodes, keeping track of whether we are inside a macro or not.
     // If in macro, expand it first and highlight the expanded code.
     for event in root.preorder_with_tokens() {
+        match &event {
+            WalkEvent::Enter(_) => res.push(Vec::new()),
+            WalkEvent::Leave(_) => {
+                /* Flattens the highlighted ranges.
+                 *
+                 * For example `#[cfg(feature = "foo")]` contains the nested ranges:
+                 * 1) parent-range: Attribute [0, 23)
+                 * 2) child-range: String [16, 21)
+                 *
+                 * The following code implements the flattening, for our example this results to:
+                 * `[Attribute [0, 16), String [16, 21), Attribute [21, 23)]`
+                 */
+                let children = res.pop().unwrap();
+                let prev = res.last_mut().unwrap();
+                let needs_flattening = !children.is_empty()
+                    && !prev.is_empty()
+                    && children.first().unwrap().range.is_subrange(&prev.last().unwrap().range);
+                if !needs_flattening {
+                    prev.extend(children);
+                } else {
+                    let mut parent = prev.pop().unwrap();
+                    for ele in children {
+                        assert!(ele.range.is_subrange(&parent.range));
+                        let mut cloned = parent.clone();
+                        parent.range = TextRange::from_to(parent.range.start(), ele.range.start());
+                        cloned.range = TextRange::from_to(ele.range.end(), cloned.range.end());
+                        if !parent.range.is_empty() {
+                            prev.push(parent);
+                        }
+                        prev.push(ele);
+                        parent = cloned;
+                    }
+                    if !parent.range.is_empty() {
+                        prev.push(parent);
+                    }
+                }
+            }
+        };
+        let current = res.last_mut().expect("during DFS traversal, the stack must not be empty");
+
         let event_range = match &event {
             WalkEvent::Enter(it) => it.text_range(),
             WalkEvent::Leave(it) => it.text_range(),
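
The flattening logic above splits a parent range around each nested child so that the final list is flat, sorted, and disjoint. A self-contained sketch of the same step over plain `(start, end, tag)` tuples — illustrative only; the real code works on `HighlightedRange` and `TextRange` values:

    fn flatten(prev: &mut Vec<(u32, u32, &'static str)>, children: Vec<(u32, u32, &'static str)>) {
        let needs_flattening = !children.is_empty()
            && !prev.is_empty()
            && {
                let child = children.first().unwrap();
                let parent = prev.last().unwrap();
                parent.0 <= child.0 && child.1 <= parent.1 // child is a subrange
            };
        if !needs_flattening {
            prev.extend(children);
            return;
        }
        let mut parent = prev.pop().unwrap();
        for child in children {
            let mut right = parent; // the part of the parent after this child
            parent.1 = child.0; // parent now covers [parent.start, child.start)
            right.0 = child.1; // right covers [child.end, parent.end)
            if parent.0 < parent.1 {
                prev.push(parent);
            }
            prev.push(child);
            parent = right;
        }
        if parent.0 < parent.1 {
            prev.push(parent);
        }
    }

    // `flatten(&mut vec![(0, 23, "attribute")], vec![(16, 21, "string")])`
    // leaves `[(0, 16, "attribute"), (16, 21, "string"), (21, 23, "attribute")]`,
    // matching the `#[cfg(feature = "foo")]` example in the comment above.
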
@@ -77,7 +119,7 @@ pub(crate) fn highlight(
             WalkEvent::Enter(Some(mc)) => {
                 current_macro_call = Some(mc.clone());
                 if let Some(range) = macro_call_range(&mc) {
-                    res.push(HighlightedRange {
+                    current.push(HighlightedRange {
                         range,
                         highlight: HighlightTag::Macro.into(),
                         binding_hash: None,
@@ -119,7 +161,7 @@ pub(crate) fn highlight(

         if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) {
             let expanded = element_to_highlight.as_token().unwrap().clone();
-            if highlight_injection(&mut res, &sema, token, expanded).is_some() {
+            if highlight_injection(current, &sema, token, expanded).is_some() {
                 continue;
             }
         }
|
@ -127,10 +169,17 @@ pub(crate) fn highlight(
|
||||||
if let Some((highlight, binding_hash)) =
|
if let Some((highlight, binding_hash)) =
|
||||||
highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight)
|
highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight)
|
||||||
{
|
{
|
||||||
res.push(HighlightedRange { range, highlight, binding_hash });
|
current.push(HighlightedRange { range, highlight, binding_hash });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
assert_eq!(res.len(), 1, "after DFS traversal, the stack should only contain a single element");
|
||||||
|
let res = res.pop().unwrap();
|
||||||
|
// Check that ranges are sorted and disjoint
|
||||||
|
assert!(res
|
||||||
|
.iter()
|
||||||
|
.zip(res.iter().skip(1))
|
||||||
|
.all(|(left, right)| left.range.end() <= right.range.start()));
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -1,9 +1,9 @@
 //! Renders a bit of code as HTML.

 use ra_db::SourceDatabase;
-use ra_syntax::AstNode;
+use ra_syntax::{AstNode, TextUnit};

-use crate::{FileId, HighlightedRange, RootDatabase};
+use crate::{FileId, RootDatabase};

 use super::highlight;
@@ -21,51 +21,35 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
         )
     }

-    let mut ranges = highlight(db, file_id, None);
-    ranges.sort_by_key(|it| it.range.start());
-    // quick non-optimal heuristic to intersect token ranges and highlighted ranges
-    let mut frontier = 0;
-    let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
+    let ranges = highlight(db, file_id, None);
+    let text = parse.tree().syntax().to_string();
+    let mut prev_pos = TextUnit::from(0);

     let mut buf = String::new();
     buf.push_str(&STYLE);
     buf.push_str("<pre><code>");
-    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
-    for token in tokens {
-        could_intersect.retain(|it| token.text_range().start() <= it.range.end());
-        while let Some(r) = ranges.get(frontier) {
-            if r.range.start() <= token.text_range().end() {
-                could_intersect.push(r);
-                frontier += 1;
-            } else {
-                break;
-            }
-        }
-        let text = html_escape(&token.text());
-        let ranges = could_intersect
-            .iter()
-            .filter(|it| token.text_range().is_subrange(&it.range))
-            .collect::<Vec<_>>();
-        if ranges.is_empty() {
+    for range in &ranges {
+        if range.range.start() > prev_pos {
+            let curr = &text[prev_pos.to_usize()..range.range.start().to_usize()];
+            let text = html_escape(curr);
             buf.push_str(&text);
-        } else {
-            let classes = ranges
-                .iter()
-                .map(|it| it.highlight.to_string().replace('.', " "))
-                .collect::<Vec<_>>()
-                .join(" ");
-            let binding_hash = ranges.first().and_then(|x| x.binding_hash);
-            let color = match (rainbow, binding_hash) {
-                (true, Some(hash)) => format!(
-                    " data-binding-hash=\"{}\" style=\"color: {};\"",
-                    hash,
-                    rainbowify(hash)
-                ),
-                _ => "".into(),
-            };
-            buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
         }
+        let curr = &text[range.range.start().to_usize()..range.range.end().to_usize()];
+
+        let class = range.highlight.to_string().replace('.', " ");
+        let color = match (rainbow, range.binding_hash) {
+            (true, Some(hash)) => {
+                format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+            }
+            _ => "".into(),
+        };
+        buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", class, color, html_escape(curr)));
+
+        prev_pos = range.range.end();
     }
+    // Add the remaining (non-highlighted) text
+    let curr = &text[prev_pos.to_usize()..];
+    let text = html_escape(curr);
+    buf.push_str(&text);
     buf.push_str("</code></pre>");
     buf
 }
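
The rewritten loop leans on the invariant the highlighter now asserts — ranges sorted and disjoint — so HTML emission becomes one linear pass alternating plain text with wrapped spans. A reduced sketch of that strategy with plain `usize` offsets, escaping and rainbow colors elided (illustrative only):

    fn render(text: &str, ranges: &[(usize, usize, &str)]) -> String {
        let mut buf = String::new();
        let mut prev = 0;
        for &(start, end, class) in ranges {
            buf.push_str(&text[prev..start]); // unhighlighted gap before the span
            buf.push_str(&format!("<span class=\"{}\">{}</span>", class, &text[start..end]));
            prev = end;
        }
        buf.push_str(&text[prev..]); // trailing unhighlighted text
        buf
    }
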
@@ -131,3 +131,28 @@ fn test_ranges() {

     assert_eq!(&highlights[0].highlight.to_string(), "field.declaration");
 }
+
+#[test]
+fn test_flattening() {
+    let (analysis, file_id) = single_file(
+        r##"
+fn fixture(ra_fixture: &str) {}
+
+fn main() {
+    fixture(r#"
+    trait Foo {
+        fn foo() {
+            println!("2 + 2 = {}", 4);
+        }
+    }"#
+    );
+}"##
+        .trim(),
+    );
+
+    let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlight_injection.html");
+    let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
+    let expected_html = &read_text(&dst_file);
+    fs::write(dst_file, &actual_html).unwrap();
+    assert_eq_text!(expected_html, actual_html);
+}
@@ -1614,6 +1614,23 @@ fn test_issue_2520() {
     );
 }

+#[test]
+fn test_issue_3861() {
+    let macro_fixture = parse_macro(
+        r#"
+        macro_rules! rgb_color {
+            ($p:expr, $t: ty) => {
+                pub fn new() {
+                    let _ = 0 as $t << $p;
+                }
+            };
+        }
+        "#,
+    );
+
+    macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#);
+}
+
 #[test]
 fn test_repeat_bad_var() {
     // FIXME: the second rule of the macro should be removed and an error about
@@ -7,7 +7,7 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
     DYN_KW, L_ANGLE,
 ]);

-const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
+const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR];

 pub(crate) fn type_(p: &mut Parser) {
     type_with_bounds_cond(p, true);
@@ -54,12 +54,14 @@ pub struct Diagnostic {
 }

 macro_rules! diagnostic_child_methods {
-    ($spanned:ident, $regular:ident, $level:expr) => (
+    ($spanned:ident, $regular:ident, $level:expr) => {
         /// Adds a new child diagnostic message to `self` with the level
         /// identified by this method's name with the given `spans` and
         /// `message`.
         pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
-            where S: MultiSpan, T: Into<String>
+        where
+            S: MultiSpan,
+            T: Into<String>,
         {
             self.children.push(Diagnostic::spanned(spans, $level, message));
             self

@@ -71,7 +73,7 @@ macro_rules! diagnostic_child_methods {
             self.children.push(Diagnostic::new($level, message));
             self
         }
-    )
+    };
 }

 /// Iterator over the children diagnostics of a `Diagnostic`.
@@ -169,13 +169,13 @@ pub mod token_stream {
 pub struct Span(bridge::client::Span);

 macro_rules! diagnostic_method {
-    ($name:ident, $level:expr) => (
+    ($name:ident, $level:expr) => {
         /// Creates a new `Diagnostic` with the given `message` at the span
         /// `self`.
         pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
             Diagnostic::spanned(self, $level, message)
         }
-    )
+    };
 }

 impl Span {
@@ -316,7 +316,7 @@ impl<'a> SyntaxRewriter<'a> {
     }
 }

-impl<'a> ops::AddAssign for SyntaxRewriter<'_> {
+impl ops::AddAssign for SyntaxRewriter<'_> {
     fn add_assign(&mut self, rhs: SyntaxRewriter) {
         assert!(rhs.f.is_none());
         self.replacements.extend(rhs.replacements)
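
This is a pure cleanup: the impl declared a lifetime parameter `'a` it never used, since the self type already uses the anonymous `'_`. A self-contained illustration with a toy type (`Wrapper` is not part of the diff):

    use std::ops;

    struct Wrapper<'a>(Option<&'a str>);

    // before: impl<'a> ops::AddAssign for Wrapper<'_> { ... }  // `'a` unused
    impl ops::AddAssign for Wrapper<'_> {
        fn add_assign(&mut self, rhs: Self) {
            // keep the first value, fill in from `rhs` otherwise
            if self.0.is_none() {
                self.0 = rhs.0;
            }
        }
    }
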
@@ -187,30 +187,7 @@ Prerequisites:

 `LSP` package.

-Installation:
-
-1. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
-2. Type `LSP Settings` to open the LSP preferences editor
-3. Add the following LSP client definition to your settings:
-+
-[source,json]
-----
-"rust-analyzer": {
-    "command": ["rust-analyzer"],
-    "languageId": "rust",
-    "scopes": ["source.rust"],
-    "syntaxes": [
-        "Packages/Rust/Rust.sublime-syntax",
-        "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
-    ],
-    "initializationOptions": {
-      "featureFlags": {
-      }
-    },
-}
-----
-
-4. You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
+Invoke the command palette (`ctrl+shift+p`) and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)

 == Usage

44
editors/code/package-lock.json
generated
44
editors/code/package-lock.json
generated
|
@@ -115,25 +115,25 @@
             "dev": true
         },
         "@typescript-eslint/eslint-plugin": {
-            "version": "2.26.0",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.26.0.tgz",
-            "integrity": "sha512-4yUnLv40bzfzsXcTAtZyTjbiGUXMrcIJcIMioI22tSOyAxpdXiZ4r7YQUU8Jj6XXrLz9d5aMHPQf5JFR7h27Nw==",
+            "version": "2.27.0",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.27.0.tgz",
+            "integrity": "sha512-/my+vVHRN7zYgcp0n4z5A6HAK7bvKGBiswaM5zIlOQczsxj/aiD7RcgD+dvVFuwFaGh5+kM7XA6Q6PN0bvb1tw==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/experimental-utils": "2.26.0",
+                "@typescript-eslint/experimental-utils": "2.27.0",
                 "functional-red-black-tree": "^1.0.1",
                 "regexpp": "^3.0.0",
                 "tsutils": "^3.17.1"
             }
         },
         "@typescript-eslint/experimental-utils": {
-            "version": "2.26.0",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.26.0.tgz",
-            "integrity": "sha512-RELVoH5EYd+JlGprEyojUv9HeKcZqF7nZUGSblyAw1FwOGNnmQIU8kxJ69fttQvEwCsX5D6ECJT8GTozxrDKVQ==",
+            "version": "2.27.0",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.27.0.tgz",
+            "integrity": "sha512-vOsYzjwJlY6E0NJRXPTeCGqjv5OHgRU1kzxHKWJVPjDYGbPgLudBXjIlc+OD1hDBZ4l1DLbOc5VjofKahsu9Jw==",
             "dev": true,
             "requires": {
                 "@types/json-schema": "^7.0.3",
-                "@typescript-eslint/typescript-estree": "2.26.0",
+                "@typescript-eslint/typescript-estree": "2.27.0",
                 "eslint-scope": "^5.0.0",
                 "eslint-utils": "^2.0.0"
             },
@@ -150,21 +150,21 @@
             }
         },
         "@typescript-eslint/parser": {
-            "version": "2.26.0",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.26.0.tgz",
-            "integrity": "sha512-+Xj5fucDtdKEVGSh9353wcnseMRkPpEAOY96EEenN7kJVrLqy/EVwtIh3mxcUz8lsFXW1mT5nN5vvEam/a5HiQ==",
+            "version": "2.27.0",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.27.0.tgz",
+            "integrity": "sha512-HFUXZY+EdwrJXZo31DW4IS1ujQW3krzlRjBrFRrJcMDh0zCu107/nRfhk/uBasO8m0NVDbBF5WZKcIUMRO7vPg==",
             "dev": true,
             "requires": {
                 "@types/eslint-visitor-keys": "^1.0.0",
-                "@typescript-eslint/experimental-utils": "2.26.0",
-                "@typescript-eslint/typescript-estree": "2.26.0",
+                "@typescript-eslint/experimental-utils": "2.27.0",
+                "@typescript-eslint/typescript-estree": "2.27.0",
                 "eslint-visitor-keys": "^1.1.0"
             }
         },
         "@typescript-eslint/typescript-estree": {
-            "version": "2.26.0",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.26.0.tgz",
-            "integrity": "sha512-3x4SyZCLB4zsKsjuhxDLeVJN6W29VwBnYpCsZ7vIdPel9ZqLfIZJgJXO47MNUkurGpQuIBALdPQKtsSnWpE1Yg==",
+            "version": "2.27.0",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.27.0.tgz",
+            "integrity": "sha512-t2miCCJIb/FU8yArjAvxllxbTiyNqaXJag7UOpB5DVoM3+xnjeOngtqlJkLRnMtzaRcJhe3CIR9RmL40omubhg==",
             "dev": true,
             "requires": {
                 "debug": "^4.1.1",
@@ -1367,9 +1367,9 @@
             }
         },
         "regexpp": {
-            "version": "3.0.0",
-            "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.0.0.tgz",
-            "integrity": "sha512-Z+hNr7RAVWxznLPuA7DIh8UNX1j9CDrUQxskw9IrBE1Dxue2lyXT+shqEIeLUjrokxIP8CMy1WkjgG3rTsd5/g==",
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz",
+            "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==",
             "dev": true
         },
         "resolve": {
@@ -1407,9 +1407,9 @@
             }
         },
         "rollup": {
-            "version": "2.3.2",
-            "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.3.2.tgz",
-            "integrity": "sha512-p66+fbfaUUOGE84sHXAOgfeaYQMslgAazoQMp//nlR519R61213EPFgrMZa48j31jNacJwexSAR1Q8V/BwGKBA==",
+            "version": "2.3.3",
+            "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.3.3.tgz",
+            "integrity": "sha512-uJ9VNWk80mb4wDCSfd1AyHoSc9TrWbkZtnO6wbsMTp9muSWkT26Dvc99MX1yGCOTvUN1Skw/KpFzKdUDuZKTXA==",
             "dev": true,
             "requires": {
                 "fsevents": "~2.1.2"
@@ -21,7 +21,7 @@
         "Programming Languages"
     ],
     "engines": {
-        "vscode": "^1.43.0"
+        "vscode": "^1.44.0"
     },
     "enableProposedApi": true,
     "scripts": {
@@ -42,10 +42,10 @@
         "@types/node": "^12.12.34",
         "@types/node-fetch": "^2.5.5",
         "@types/vscode": "^1.43.0",
-        "@typescript-eslint/eslint-plugin": "^2.26.0",
-        "@typescript-eslint/parser": "^2.26.0",
+        "@typescript-eslint/eslint-plugin": "^2.27.0",
+        "@typescript-eslint/parser": "^2.27.0",
         "eslint": "^6.8.0",
-        "rollup": "^2.3.2",
+        "rollup": "^2.3.3",
         "tslib": "^1.11.1",
         "typescript": "^3.8.3",
         "typescript-formatter": "^7.2.2",
@@ -342,11 +342,6 @@
             "default": true,
             "description": "Show function name and docs in parameter hints"
         },
-        "rust-analyzer.highlighting.semanticTokens": {
-            "type": "boolean",
-            "default": false,
-            "description": "Use proposed semantic tokens API for syntax highlighting"
-        },
         "rust-analyzer.updates.channel": {
             "type": "string",
             "enum": [
@@ -1,11 +1,10 @@
 import * as lc from 'vscode-languageclient';
 import * as vscode from 'vscode';

-import { Config } from './config';
 import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
 import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed';

-export async function createClient(config: Config, serverPath: string, cwd: string): Promise<lc.LanguageClient> {
+export async function createClient(serverPath: string, cwd: string): Promise<lc.LanguageClient> {
     // '.' Is the fallback if no folder is open
     // TODO?: Workspace folders support Uri's (eg: file://test.txt).
     // It might be a good idea to test if the uri points to a file.
@@ -73,15 +72,12 @@ export async function createClient(config: Config, serverPath: string, cwd: string): Promise<lc.LanguageClient> {
     };

     // To turn on all proposed features use: res.registerProposedFeatures();
-    // Here we want to just enable CallHierarchyFeature since it is available on stable.
-    // Note that while the CallHierarchyFeature is stable the LSP protocol is not.
+    // Here we want to enable CallHierarchyFeature and SemanticTokensFeature
+    // since they are available on stable.
+    // Note that while these features are stable in vscode their LSP protocol
+    // implementations are still in the "proposed" category for 3.16.
     res.registerFeature(new CallHierarchyFeature(res));
-
-    if (config.package.enableProposedApi) {
-        if (config.highlightingSemanticTokens) {
-            res.registerFeature(new SemanticTokensFeature(res));
-        }
-    }
+    res.registerFeature(new SemanticTokensFeature(res));

     return res;
 }
@@ -69,7 +69,6 @@ export class Config {
     get serverPath() { return this.cfg.get<null | string>("serverPath")!; }
     get channel() { return this.cfg.get<UpdatesChannel>("updates.channel")!; }
     get askBeforeDownload() { return this.cfg.get<boolean>("updates.askBeforeDownload")!; }
-    get highlightingSemanticTokens() { return this.cfg.get<boolean>("highlighting.semanticTokens")!; }
     get traceExtension() { return this.cfg.get<boolean>("trace.extension")!; }

     get inlayHints() {
@@ -21,7 +21,7 @@ export class Ctx {
         serverPath: string,
         cwd: string,
     ): Promise<Ctx> {
-        const client = await createClient(config, serverPath, cwd);
+        const client = await createClient(serverPath, cwd);
         const res = new Ctx(config, extCtx, client, serverPath);
         res.pushCleanup(client.start());
         await client.onReady();
@@ -3,24 +3,20 @@ use std::path::PathBuf;
 use anyhow::Result;

 use crate::{
-    not_bash::{fs2, pushd, rm_rf, run},
+    not_bash::{date_iso, fs2, pushd, rm_rf, run},
     project_root,
 };

-pub struct ClientOpts {
-    pub version: String,
-    pub release_tag: String,
-}
-
-pub fn run_dist(client_opts: Option<ClientOpts>) -> Result<()> {
+pub fn run_dist(nightly: bool, client_version: Option<String>) -> Result<()> {
     let dist = project_root().join("dist");
     rm_rf(&dist)?;
     fs2::create_dir_all(&dist)?;

-    if let Some(ClientOpts { version, release_tag }) = client_opts {
+    if let Some(version) = client_version {
+        let release_tag = if nightly { "nightly".to_string() } else { date_iso()? };
         dist_client(&version, &release_tag)?;
     }
-    dist_server()?;
+    dist_server(nightly)?;
     Ok(())
 }
@@ -50,7 +46,7 @@ fn dist_client(version: &str, release_tag: &str) -> Result<()> {
     Ok(())
 }

-fn dist_server() -> Result<()> {
+fn dist_server(nightly: bool) -> Result<()> {
     if cfg!(target_os = "linux") {
         std::env::set_var("CC", "clang");
         run!(
@@ -60,7 +56,9 @@ fn dist_server() -> Result<()> {
             // We'd want to add, but that requires setting the right linker somehow
             // --features=jemalloc
         )?;
+        if !nightly {
             run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?;
+        }
     } else {
         run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?;
     }
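Taken together, `run_dist` now derives the release tag itself: the literal "nightly" for nightly builds, otherwise today's UTC date, and only non-nightly Linux server binaries get stripped (presumably so nightly backtraces keep their symbols). Illustrative call sites, with placeholder version strings, not code from this commit:

[source,rust]
----
// Stable dist: release tag becomes today's date, e.g. "2020-04-09".
run_dist(false, Some("0.2.100".to_string()))?;

// Nightly dist: release tag is the literal "nightly"; the Linux server
// binary is left unstripped.
run_dist(true, Some("0.3.100-nightly".to_string()))?;

// Server-only dist: no client package is produced.
run_dist(false, None)?;
----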
@@ -21,7 +21,7 @@ use walkdir::{DirEntry, WalkDir};

 use crate::{
     codegen::Mode,
-    not_bash::{fs2, pushd, rm_rf, run},
+    not_bash::{date_iso, fs2, pushd, rm_rf, run},
 };

 pub use anyhow::Result;
@@ -180,7 +180,7 @@ pub fn run_release(dry_run: bool) -> Result<()> {
     let website_root = project_root().join("../rust-analyzer.github.io");
     let changelog_dir = website_root.join("./thisweek/_posts");

-    let today = run!("date --iso")?;
+    let today = date_iso()?;
     let commit = run!("git rev-parse HEAD")?;
     let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();
@@ -13,7 +13,7 @@ use std::env;
 use pico_args::Arguments;
 use xtask::{
     codegen::{self, Mode},
-    dist::{run_dist, ClientOpts},
+    dist::run_dist,
     install::{ClientOpt, InstallCmd, ServerOpt},
     not_bash::pushd,
     pre_commit, project_root, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt,
@@ -103,16 +103,10 @@ FLAGS:
             run_release(dry_run)
         }
         "dist" => {
-            let client_opts = if args.contains("--client") {
-                Some(ClientOpts {
-                    version: args.value_from_str("--version")?,
-                    release_tag: args.value_from_str("--tag")?,
-                })
-            } else {
-                None
-            };
+            let nightly = args.contains("--nightly");
+            let client_version: Option<String> = args.opt_value_from_str("--client")?;
             args.finish()?;
-            run_dist(client_opts)
+            run_dist(nightly, client_version)
         }
         _ => {
             eprintln!(
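The CLI change folds three flags (`--client --version X --tag Y`) into `--nightly` plus `--client X`, with `--client` now carrying the version as its value. A self-contained sketch of the `pico_args` pattern used here (flag names taken from the diff; the standalone program itself is hypothetical):

[source,rust]
----
use pico_args::Arguments;

fn main() -> Result<(), pico_args::Error> {
    // e.g. `cargo xtask dist --nightly --client 0.3.100-nightly`
    let mut args = Arguments::from_env();
    let nightly = args.contains("--nightly");
    // `opt_value_from_str` returns Ok(None) when the flag is absent,
    // so a plain `cargo xtask dist` still builds only the server.
    let client_version: Option<String> = args.opt_value_from_str("--client")?;
    println!("nightly={} client={:?}", nightly, client_version);
    Ok(())
}
----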
@@ -94,6 +94,10 @@ pub fn run_process(cmd: String, echo: bool) -> Result<String> {
     run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd))
 }

+pub fn date_iso() -> Result<String> {
+    run!("date --iso --utc")
+}
+
 fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
     let mut args = shelx(cmd);
     let binary = args.remove(0);
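`date_iso` shells out through the `run!` macro, which keeps `not_bash` dependency-free but assumes a Unix-like environment with coreutils `date` on `PATH`. A hypothetical in-process equivalent, if `chrono` were a dependency (it is not; this only shows what the helper computes):

[source,rust]
----
// Hypothetical alternative: compute the UTC ISO date without a subprocess.
// Assumes the `chrono` crate; not what the xtask actually does.
fn date_iso() -> String {
    chrono::Utc::now().format("%Y-%m-%d").to_string() // e.g. "2020-04-09"
}
----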